# Jupyter magic: render matplotlib figures inline in the notebook.
%matplotlib inline
import pandas as pd
import numpy as np
from statsmodels.stats.multicomp import pairwise_tukeyhsd
from statsmodels.stats.multicomp import MultiComparison
from statsmodels.formula.api import ols
from scipy import stats
# Semicolon-separated CSV read with Latin-1 encoding (non-UTF-8 source file).
data = pd.read_csv("playlists.csv", sep=";", encoding = "ISO-8859-1")
# Overview of all 64 columns: counts/uniques for categoricals, stats for numerics.
data.describe(include="all")
| company | playlist_sample | namesfiles | no | artist | song | sampleratefiles | totalsamplesfiles | durationfiles | bitratefiles | ... | chromagramfiles_5 | chromagramfiles_6 | chromagramfiles_7 | chromagramfiles_8 | chromagramfiles_9 | chromagramfiles_10 | chromagramfiles_11 | chromagramfiles_12 | attackslopefiles | attackleapfiles | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 1782 | 1782.000000 | 1782 | 1782.000000 | 1782 | 1782 | 1782.0 | 1.782000e+03 | 1782.000000 | 1782.000000 | ... | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 | 1782.000000 |
| unique | 6 | NaN | 515 | NaN | 353 | 443 | NaN | NaN | NaN | NaN | ... | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN |
| top | Arte Francés | NaN | 06 - Goldfish - We Come Together (Fishybeat Mi... | NaN | Satin Jackets | Mirage.mp3 ... | NaN | NaN | NaN | NaN | ... | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN |
| freq | 441 | NaN | 6 | NaN | 51 | 12 | NaN | NaN | NaN | NaN | ... | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN | NaN |
| mean | NaN | 2.084175 | NaN | 17.116162 | NaN | NaN | 44100.0 | 1.043632e+07 | 236.651237 | 252.336700 | ... | 0.332301 | 0.319191 | 0.265246 | 0.440462 | 0.549565 | 0.581967 | 0.477825 | 0.430522 | 15.804409 | 0.507503 |
| std | NaN | 1.114796 | NaN | 11.837401 | NaN | NaN | 0.0 | 3.227105e+06 | 73.176981 | 88.377597 | ... | 0.270616 | 0.263919 | 0.249612 | 0.290454 | 0.314771 | 0.323173 | 0.321646 | 0.295563 | 9.338659 | 0.247587 |
| min | NaN | 1.000000 | NaN | 1.000000 | NaN | NaN | 44100.0 | 5.965054e+06 | 135.262000 | 128.000000 | ... | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.000000 | 0.168304 |
| 25% | NaN | 1.000000 | NaN | 8.000000 | NaN | NaN | 44100.0 | 8.353151e+06 | 189.413850 | 128.000000 | ... | 0.116591 | 0.109123 | 0.075378 | 0.204738 | 0.301961 | 0.316822 | 0.214680 | 0.203097 | 9.810711 | 0.285584 |
| 50% | NaN | 2.000000 | NaN | 15.500000 | NaN | NaN | 44100.0 | 9.480378e+06 | 214.974562 | 320.000000 | ... | 0.262799 | 0.259887 | 0.183082 | 0.396861 | 0.520957 | 0.570088 | 0.418077 | 0.379737 | 14.833864 | 0.452456 |
| 75% | NaN | 3.000000 | NaN | 24.000000 | NaN | NaN | 44100.0 | 1.146931e+07 | 260.075075 | 320.000000 | ... | 0.494897 | 0.469603 | 0.384485 | 0.642814 | 0.829318 | 0.918554 | 0.735384 | 0.616558 | 19.964413 | 0.730669 |
| max | NaN | 5.000000 | NaN | 65.000000 | NaN | NaN | 44100.0 | 2.843136e+07 | 644.702000 | 320.000000 | ... | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 1.000000 | 66.233620 | 0.999408 |
11 rows × 64 columns
Find the positive and negative songs of the selection process for every company.
companies = data['company'].unique()
by_company = [data[data.company == company] for company in companies]

positives = []   # songs that survived into each company's final playlist
negatives = []   # earlier songs that did not make it into the final playlist
for data_com in by_company:
    data_com = data_com.sort_values('playlist_sample')
    # Highest playlist number for this company (frame is sorted by it).
    # .iloc[-1] replaces int(df.tail(1).col), which relied on the deprecated
    # implicit conversion of a one-element Series to int.
    last_pl = int(data_com['playlist_sample'].iloc[-1])
    df_last_pl = data_com.query('playlist_sample == ' + str(last_pl))
    # .copy() so the later 'chosen' column assignment writes to an owned
    # frame — this was the source of the SettingWithCopyWarning.
    positives.append(df_last_pl.copy())
    # O(1) membership test per row via a set of (artist, song) pairs instead
    # of re-scanning df_last_pl for every earlier row.
    kept = set(zip(df_last_pl['artist'], df_last_pl['song']))
    dropped_rows = [
        row
        for _, row in data_com[data_com.playlist_sample < last_pl].iterrows()
        if (row['artist'], row['song']) not in kept
    ]
    # reset_index(drop=True) reproduces the ignore_index=True behaviour of the
    # removed per-row DataFrame.append calls (deprecated, gone in pandas 2.0).
    negatives.append(
        pd.DataFrame(dropped_rows, columns=data_com.columns).reset_index(drop=True)
    )

# Label and merge: 0 = discarded song, 1 = kept in the final playlist.
df_n_ps = []
for i in range(len(negatives)):
    negatives[i]['chosen'] = 0
    positives[i]['chosen'] = 1
    # pd.concat replaces the deprecated DataFrame.append (same default
    # ignore_index=False as the original call).
    df_n_ps.append(pd.concat([negatives[i], positives[i]]))
D:\Usuarios\1144084318\AppData\Roaming\Python\Python37\site-packages\ipykernel_launcher.py:4: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy after removing the cwd from sys.path.
import warnings
import matplotlib.pyplot as plt
import math
import seaborn as sns
warnings.filterwarnings('ignore')
alpha = 0.05
# For every company: Tukey HSD per feature between chosen (1) and discarded (0)
# songs; when the difference is significant AND the ANOVA assumptions hold
# (Levene homogeneity + Shapiro normality of residuals), plot both densities.
for df_n_p in df_n_ps:
    df_n_p = df_n_p.fillna(0)
    fig = plt.figure(figsize=(17, 200))
    i = 1
    # Feature columns start at position 8; the last column is 'chosen'.
    for index in range(8, df_n_p.shape[1] - 1):
        name = df_n_p.columns.values[index]
        df_n_p[name] = df_n_p[name].astype('float64')
        mc = MultiComparison(df_n_p[name], df_n_p['chosen'])
        mc_results = mc.tukeyhsd()
        # Use the public `reject` array instead of the original private-table
        # indexing `_results_table.data[1:][0][5]`, which breaks whenever
        # statsmodels adds or reorders table columns (e.g. the p-adj column).
        if mc_results.reject[0]:
            results = ols(name + ' ~ C(chosen)', data=df_n_p).fit()
            homogeneity_test = stats.levene(df_n_p[name][df_n_p['chosen'] == 0],
                                            df_n_p[name][df_n_p['chosen'] == 1])[1]
            normality_test = stats.shapiro(results.resid)[1]
            if homogeneity_test > alpha and normality_test > alpha:
                # BUG FIX: the original `math.ceil(df_n_p.shape[1]-9/2)` divided
                # only 9 by 2 (operator precedence), allocating far too many
                # subplot rows. The intent is (feature count)/2 rows of 2 plots.
                n_rows = math.ceil((df_n_p.shape[1] - 9) / 2)
                ax = fig.add_subplot(n_rows, 2, i)
                sns.kdeplot(df_n_p.loc[df_n_p.chosen == 0][name], shade=True, ax=ax)
                sns.kdeplot(df_n_p.loc[df_n_p.chosen == 1][name], shade=True, ax=ax)
                plt.title(df_n_p.iloc[0, 0].upper() + " " + name)
                plt.legend(['neg', 'pos'])
                i += 1
from collections import Counter
from sklearn.cluster import KMeans
from sklearn.metrics import confusion_matrix, accuracy_score, silhouette_samples, silhouette_score, calinski_harabaz_score
from sklearn import preprocessing
from sklearn.decomposition import PCA
# These three columns came out of the merge with object dtype; cast them to
# float64 so the scaling step below treats them as numeric.
for i in range(len(companies)):
    for col in ('bitratefiles', 'pitchfiles', 'bestkeyfiles'):
        df_n_ps[i][col] = df_n_ps[i][col].astype('float64')
df_n_ps[0].info()
<class 'pandas.core.frame.DataFrame'> Int64Index: 372 entries, 0 to 179 Data columns (total 65 columns): company 372 non-null object playlist_sample 372 non-null object namesfiles 372 non-null object no 372 non-null object artist 372 non-null object song 372 non-null object sampleratefiles 372 non-null object totalsamplesfiles 372 non-null object durationfiles 372 non-null float64 bitratefiles 372 non-null float64 rmsfiles 372 non-null float64 rmsmedianfiles 372 non-null float64 lowenergyfiles 372 non-null float64 ASRfiles 372 non-null float64 beatspectrumfiles 372 non-null float64 eventdensityfiles 372 non-null float64 tempofiles 372 non-null float64 pulseclarityfiles 372 non-null float64 zerocrossfiles 372 non-null float64 rolloffsfiles 372 non-null float64 brightnessfiles 372 non-null float64 spreadfiles 372 non-null float64 centroidfiles 371 non-null float64 kurtosisfiles 372 non-null float64 flatnessfiles 372 non-null float64 entropyfiles 372 non-null float64 mfccfiles_1 372 non-null float64 mfccfiles_2 372 non-null float64 mfccfiles_3 372 non-null float64 mfccfiles_4 372 non-null float64 mfccfiles_5 372 non-null float64 mfccfiles_6 372 non-null float64 mfccfiles_7 372 non-null float64 mfccfiles_8 372 non-null float64 mfccfiles_9 372 non-null float64 mfccfiles_10 372 non-null float64 mfccfiles_11 372 non-null float64 mfccfiles_12 372 non-null float64 mfccfiles_13 372 non-null float64 pitchfiles 372 non-null float64 inharmonicityfiles 372 non-null float64 bestkeyfiles 372 non-null float64 keyclarityfiles 372 non-null float64 modalityfiles 372 non-null float64 tonalcentroidfiles_1 372 non-null float64 tonalcentroidfiles_2 372 non-null float64 tonalcentroidfiles_3 372 non-null float64 tonalcentroidfiles_4 372 non-null float64 tonalcentroidfiles_5 372 non-null float64 tonalcentroidfiles_6 372 non-null float64 chromagramfiles_1 372 non-null float64 chromagramfiles_2 372 non-null float64 chromagramfiles_3 372 non-null float64 chromagramfiles_4 372 non-null float64 
chromagramfiles_5 372 non-null float64 chromagramfiles_6 372 non-null float64 chromagramfiles_7 372 non-null float64 chromagramfiles_8 372 non-null float64 chromagramfiles_9 372 non-null float64 chromagramfiles_10 372 non-null float64 chromagramfiles_11 372 non-null float64 chromagramfiles_12 372 non-null float64 attackslopefiles 372 non-null float64 attackleapfiles 372 non-null float64 chosen 372 non-null int64 dtypes: float64(56), int64(1), object(8) memory usage: 191.8+ KB
Vamos a reemplazar los NaN y entonces a normalizar los datos para que todas las variables tengan la misma importancia. Solo vamos a considerar los datos numéricos.
# Replace NaNs, then z-score the numeric columns (from position 8 on) so every
# feature carries equal weight; the scaled copies keep the original labels.
df_n_ps_std = [0] * len(companies)
for i in range(len(companies)):
    df_n_ps[i] = df_n_ps[i].fillna(0)
    numeric_part = df_n_ps[i].iloc[:, 8:]
    df_n_ps_std[i] = pd.DataFrame(
        preprocessing.scale(numeric_part), columns=numeric_part.columns
    )
# Sanity check: means should all be ~0 after scaling.
df_n_ps_std[0].mean(axis=0)
durationfiles -4.261824e-16 bitratefiles 0.000000e+00 rmsfiles 4.303606e-16 rmsmedianfiles -4.279731e-16 lowenergyfiles -2.387576e-18 ASRfiles -2.023471e-16 beatspectrumfiles 2.477111e-16 eventdensityfiles -7.879002e-17 tempofiles -3.133694e-17 pulseclarityfiles 3.103849e-17 zerocrossfiles -2.930750e-16 rolloffsfiles 5.789873e-16 brightnessfiles -8.356517e-17 spreadfiles -3.842506e-16 centroidfiles -2.142850e-16 kurtosisfiles -6.327077e-17 flatnessfiles 1.366887e-16 entropyfiles 3.516900e-15 mfccfiles_1 -1.921999e-16 mfccfiles_2 -5.372047e-18 mfccfiles_3 -1.178120e-16 mfccfiles_4 -2.648718e-17 mfccfiles_5 -4.655774e-17 mfccfiles_6 -1.193788e-18 mfccfiles_7 6.002516e-17 mfccfiles_8 1.492235e-17 mfccfiles_9 5.133289e-17 mfccfiles_10 2.596489e-17 mfccfiles_11 -3.402296e-17 mfccfiles_12 -4.775153e-18 mfccfiles_13 -4.476706e-18 pitchfiles 0.000000e+00 inharmonicityfiles 2.595743e-15 bestkeyfiles -8.475896e-17 keyclarityfiles 5.369062e-16 modalityfiles -3.282918e-17 tonalcentroidfiles_1 -1.522080e-17 tonalcentroidfiles_2 -6.565835e-18 tonalcentroidfiles_3 -9.699529e-18 tonalcentroidfiles_4 1.671303e-17 tonalcentroidfiles_5 -2.193586e-17 tonalcentroidfiles_6 2.059285e-17 chromagramfiles_1 -1.811574e-16 chromagramfiles_2 -4.282715e-17 chromagramfiles_3 4.819920e-17 chromagramfiles_4 -2.188363e-16 chromagramfiles_5 -3.282918e-18 chromagramfiles_6 -8.834033e-17 chromagramfiles_7 3.730588e-17 chromagramfiles_8 -1.140068e-16 chromagramfiles_9 -2.715868e-17 chromagramfiles_10 -6.707597e-17 chromagramfiles_11 -6.051014e-17 chromagramfiles_12 2.148446e-16 attackslopefiles -4.327482e-17 attackleapfiles -1.551925e-16 chosen -3.068036e-16 dtype: float64
# Sanity check: std ≈ 1 for every scaled column except the constant ones
# (bitratefiles and pitchfiles come out as 0 — dropped in the next cell).
df_n_ps_std[0].std(axis=0)
durationfiles 1.001347 bitratefiles 0.000000 rmsfiles 1.001347 rmsmedianfiles 1.001347 lowenergyfiles 1.001347 ASRfiles 1.001347 beatspectrumfiles 1.001347 eventdensityfiles 1.001347 tempofiles 1.001347 pulseclarityfiles 1.001347 zerocrossfiles 1.001347 rolloffsfiles 1.001347 brightnessfiles 1.001347 spreadfiles 1.001347 centroidfiles 1.001347 kurtosisfiles 1.001347 flatnessfiles 1.001347 entropyfiles 1.001347 mfccfiles_1 1.001347 mfccfiles_2 1.001347 mfccfiles_3 1.001347 mfccfiles_4 1.001347 mfccfiles_5 1.001347 mfccfiles_6 1.001347 mfccfiles_7 1.001347 mfccfiles_8 1.001347 mfccfiles_9 1.001347 mfccfiles_10 1.001347 mfccfiles_11 1.001347 mfccfiles_12 1.001347 mfccfiles_13 1.001347 pitchfiles 0.000000 inharmonicityfiles 1.001347 bestkeyfiles 1.001347 keyclarityfiles 1.001347 modalityfiles 1.001347 tonalcentroidfiles_1 1.001347 tonalcentroidfiles_2 1.001347 tonalcentroidfiles_3 1.001347 tonalcentroidfiles_4 1.001347 tonalcentroidfiles_5 1.001347 tonalcentroidfiles_6 1.001347 chromagramfiles_1 1.001347 chromagramfiles_2 1.001347 chromagramfiles_3 1.001347 chromagramfiles_4 1.001347 chromagramfiles_5 1.001347 chromagramfiles_6 1.001347 chromagramfiles_7 1.001347 chromagramfiles_8 1.001347 chromagramfiles_9 1.001347 chromagramfiles_10 1.001347 chromagramfiles_11 1.001347 chromagramfiles_12 1.001347 attackslopefiles 1.001347 attackleapfiles 1.001347 chosen 1.001347 dtype: float64
Borramos pitch y bitrate porque todos sus valores son 0.
# bitratefiles and pitchfiles are constant (std 0 above), so their scaled
# values are all zero — remove both in a single drop per company frame.
for i in range(len(companies)):
    df_n_ps_std[i] = df_n_ps_std[i].drop(columns=["pitchfiles", "bitratefiles"])
df_n_ps_std[0].columns
Index(['durationfiles', 'rmsfiles', 'rmsmedianfiles', 'lowenergyfiles',
'ASRfiles', 'beatspectrumfiles', 'eventdensityfiles', 'tempofiles',
'pulseclarityfiles', 'zerocrossfiles', 'rolloffsfiles',
'brightnessfiles', 'spreadfiles', 'centroidfiles', 'kurtosisfiles',
'flatnessfiles', 'entropyfiles', 'mfccfiles_1', 'mfccfiles_2',
'mfccfiles_3', 'mfccfiles_4', 'mfccfiles_5', 'mfccfiles_6',
'mfccfiles_7', 'mfccfiles_8', 'mfccfiles_9', 'mfccfiles_10',
'mfccfiles_11', 'mfccfiles_12', 'mfccfiles_13', 'inharmonicityfiles',
'bestkeyfiles', 'keyclarityfiles', 'modalityfiles',
'tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6',
'chromagramfiles_1', 'chromagramfiles_2', 'chromagramfiles_3',
'chromagramfiles_4', 'chromagramfiles_5', 'chromagramfiles_6',
'chromagramfiles_7', 'chromagramfiles_8', 'chromagramfiles_9',
'chromagramfiles_10', 'chromagramfiles_11', 'chromagramfiles_12',
'attackslopefiles', 'attackleapfiles', 'chosen'],
dtype='object')
# The 13 MFCC columns sit at positions 17–29 after the two drops.
df_n_ps_std[0].columns[17:30]
Index(['mfccfiles_1', 'mfccfiles_2', 'mfccfiles_3', 'mfccfiles_4',
'mfccfiles_5', 'mfccfiles_6', 'mfccfiles_7', 'mfccfiles_8',
'mfccfiles_9', 'mfccfiles_10', 'mfccfiles_11', 'mfccfiles_12',
'mfccfiles_13'],
dtype='object')
# Keep only the 13 standardized MFCC columns per company.
df_n_ps_std_mfcc = [None] * len(companies)
for i in range(len(companies)):
    # .iloc already returns a DataFrame with the right column labels, so the
    # original pd.DataFrame(...) wrapper and the separate columns reassignment
    # were redundant; .copy() makes each slice an independent frame.
    df_n_ps_std_mfcc[i] = df_n_ps_std[i].iloc[:, 17:30].copy()
df_n_ps_std_mfcc[0].info()
# Keras with the TensorFlow backend; the recorded run used version 2.3.0.
import keras
keras.__version__
Using TensorFlow backend.
'2.3.0'
from keras.layers import Input, Flatten, Dense#, Lambda
from keras.models import Model
from keras import layers
from keras import models, optimizers
from sklearn.neural_network import MLPClassifier
from sklearn.model_selection import GridSearchCV  # searches the best parameter configuration with cross-validation
from sklearn.metrics import make_scorer  # builds a scorer object from a score function (needed for kappa)
from sklearn.metrics import accuracy_score, cohen_kappa_score, classification_report, roc_auc_score
from sklearn.model_selection import train_test_split  # dataset partitioning method for evaluation
from sklearn.preprocessing import StandardScaler
# Model company 0 only, using just its 13 standardized MFCC features.
X = df_n_ps_std_mfcc[0]
y = df_n_ps[0]['chosen']
# NOTE(review): no random_state is set, so this split (and every result below)
# changes between runs — consider fixing a seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(279, 13)
# Base estimator; hidden_layer_sizes here is just a placeholder — the grid
# search below overrides it with each candidate from hidden_layer_sizes_vec.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate values for the hyperparameter grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but commented out of the grid below (kept to enable later).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time()  # current time in seconds since Jan 1st 1970 (epoch reference)
np.random.seed(1234)
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
             }
# Score every candidate on both kappa and accuracy; refit the best by accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` parameter was deprecated in sklearn 0.22 and removed
# in 0.24 — this call only runs on the older sklearn this notebook used.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # time after the model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.006, 'max_iter': 300}, que permiten obtener un Accuracy de 82.08% y un Kappa del 43.49
Tiempo total: 29.12 minutos
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\neural_network\multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (300) reached and the optimization hasn't converged yet. % self.max_iter, ConvergenceWarning)
# Rebuild the grid search's best MLP as a functional Keras model:
# tanh hidden layers sized from best_params_, one sigmoid output unit.
n0 = X_train.shape[1]  # number of input features (13 MFCCs)
# Layer widths: best hidden sizes plus the single output unit. list(...) + [1]
# replaces the original element-by-element index-copy loop.
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
# Read the tuned values from the search instead of hard-coding them, so this
# cell stays consistent if the grid search is rerun (0.006 / 300 in the
# recorded run).
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation='tanh')(hidden_outputs[i]))
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot of the freshly initialized weights
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_3 (Dense) (None, 20) 280 _________________________________________________________________ dense_4 (Dense) (None, 1) 21 ================================================================= Total params: 301 Trainable params: 301 Non-trainable params: 0 _________________________________________________________________
# Re-apply the initial weights captured right after model construction so
# training starts from that snapshot.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate when val_accuracy fails to improve by >= 0.01 for
# 10 consecutive epochs.
# NOTE(review): the test split doubles as validation data here, so it leaks
# into training decisions (LR schedule) — consider a separate validation set.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                    )
Train on 279 samples, validate on 93 samples Epoch 1/300 279/279 [==============================] - 1s 3ms/step - loss: 0.7063 - accuracy: 0.5197 - val_loss: 0.6660 - val_accuracy: 0.5914 Epoch 2/300 279/279 [==============================] - 0s 61us/step - loss: 0.6003 - accuracy: 0.6918 - val_loss: 0.5741 - val_accuracy: 0.7419 Epoch 3/300 279/279 [==============================] - 0s 68us/step - loss: 0.5317 - accuracy: 0.7348 - val_loss: 0.5278 - val_accuracy: 0.7527 Epoch 4/300 279/279 [==============================] - 0s 72us/step - loss: 0.4996 - accuracy: 0.7634 - val_loss: 0.5053 - val_accuracy: 0.7742 Epoch 5/300 279/279 [==============================] - 0s 75us/step - loss: 0.4770 - accuracy: 0.7778 - val_loss: 0.4938 - val_accuracy: 0.8065 Epoch 6/300 279/279 [==============================] - 0s 90us/step - loss: 0.4639 - accuracy: 0.7778 - val_loss: 0.4845 - val_accuracy: 0.7957 Epoch 7/300 279/279 [==============================] - 0s 82us/step - loss: 0.4526 - accuracy: 0.7849 - val_loss: 0.4813 - val_accuracy: 0.8172 Epoch 8/300 279/279 [==============================] - 0s 75us/step - loss: 0.4466 - accuracy: 0.7921 - val_loss: 0.4822 - val_accuracy: 0.8065 Epoch 9/300 279/279 [==============================] - 0s 79us/step - loss: 0.4412 - accuracy: 0.7993 - val_loss: 0.4819 - val_accuracy: 0.8172 Epoch 10/300 279/279 [==============================] - 0s 82us/step - loss: 0.4373 - accuracy: 0.8065 - val_loss: 0.4840 - val_accuracy: 0.7957 Epoch 11/300 279/279 [==============================] - 0s 86us/step - loss: 0.4343 - accuracy: 0.8136 - val_loss: 0.4827 - val_accuracy: 0.7957 Epoch 12/300 279/279 [==============================] - 0s 75us/step - loss: 0.4328 - accuracy: 0.8100 - val_loss: 0.4866 - val_accuracy: 0.7849 Epoch 13/300 279/279 [==============================] - 0s 82us/step - loss: 0.4225 - accuracy: 0.8136 - val_loss: 0.4860 - val_accuracy: 0.7957 Epoch 14/300 279/279 [==============================] - 0s 75us/step - loss: 
0.4186 - accuracy: 0.8136 - val_loss: 0.4865 - val_accuracy: 0.7849 Epoch 15/300 279/279 [==============================] - 0s 82us/step - loss: 0.4118 - accuracy: 0.8136 - val_loss: 0.4846 - val_accuracy: 0.7849 Epoch 16/300 279/279 [==============================] - 0s 79us/step - loss: 0.4080 - accuracy: 0.8208 - val_loss: 0.4901 - val_accuracy: 0.7849 Epoch 17/300 279/279 [==============================] - 0s 107us/step - loss: 0.4009 - accuracy: 0.8351 - val_loss: 0.4878 - val_accuracy: 0.7742 Epoch 00017: ReduceLROnPlateau reducing learning rate to 0.003000000026077032. Epoch 18/300 279/279 [==============================] - 0s 107us/step - loss: 0.3950 - accuracy: 0.8387 - val_loss: 0.4864 - val_accuracy: 0.7742 Epoch 19/300 279/279 [==============================] - 0s 68us/step - loss: 0.3922 - accuracy: 0.8387 - val_loss: 0.4852 - val_accuracy: 0.7742 Epoch 20/300 279/279 [==============================] - 0s 86us/step - loss: 0.3886 - accuracy: 0.8530 - val_loss: 0.4807 - val_accuracy: 0.7849 Epoch 21/300 279/279 [==============================] - 0s 64us/step - loss: 0.3865 - accuracy: 0.8566 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 22/300 279/279 [==============================] - 0s 82us/step - loss: 0.3828 - accuracy: 0.8530 - val_loss: 0.4848 - val_accuracy: 0.7849 Epoch 23/300 279/279 [==============================] - 0s 79us/step - loss: 0.3800 - accuracy: 0.8566 - val_loss: 0.4855 - val_accuracy: 0.7849 Epoch 24/300 279/279 [==============================] - 0s 68us/step - loss: 0.3760 - accuracy: 0.8638 - val_loss: 0.4818 - val_accuracy: 0.7849 Epoch 25/300 279/279 [==============================] - 0s 79us/step - loss: 0.3744 - accuracy: 0.8602 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 26/300 279/279 [==============================] - 0s 86us/step - loss: 0.3705 - accuracy: 0.8710 - val_loss: 0.4781 - val_accuracy: 0.7849 Epoch 27/300 279/279 [==============================] - 0s 86us/step - loss: 0.3666 - accuracy: 0.8746 - 
val_loss: 0.4785 - val_accuracy: 0.7849 Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. Epoch 28/300 279/279 [==============================] - 0s 79us/step - loss: 0.3637 - accuracy: 0.8746 - val_loss: 0.4799 - val_accuracy: 0.7849 Epoch 29/300 279/279 [==============================] - 0s 75us/step - loss: 0.3620 - accuracy: 0.8746 - val_loss: 0.4816 - val_accuracy: 0.7849 Epoch 30/300 279/279 [==============================] - 0s 79us/step - loss: 0.3604 - accuracy: 0.8746 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 31/300 279/279 [==============================] - 0s 75us/step - loss: 0.3596 - accuracy: 0.8746 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 32/300 279/279 [==============================] - 0s 79us/step - loss: 0.3572 - accuracy: 0.8781 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 33/300 279/279 [==============================] - 0s 79us/step - loss: 0.3555 - accuracy: 0.8781 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 34/300 279/279 [==============================] - 0s 79us/step - loss: 0.3540 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 35/300 279/279 [==============================] - 0s 82us/step - loss: 0.3527 - accuracy: 0.8817 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 36/300 279/279 [==============================] - 0s 75us/step - loss: 0.3510 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 37/300 279/279 [==============================] - 0s 104us/step - loss: 0.3493 - accuracy: 0.8781 - val_loss: 0.4836 - val_accuracy: 0.7849 Epoch 00037: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 38/300 279/279 [==============================] - 0s 86us/step - loss: 0.3476 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 39/300 279/279 [==============================] - 0s 82us/step - loss: 0.3469 - accuracy: 0.8817 - val_loss: 0.4823 - val_accuracy: 0.7849 Epoch 40/300 279/279 [==============================] - 0s 75us/step - loss: 0.3457 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 41/300 279/279 [==============================] - 0s 82us/step - loss: 0.3449 - accuracy: 0.8817 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 42/300 279/279 [==============================] - 0s 90us/step - loss: 0.3443 - accuracy: 0.8817 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 43/300 279/279 [==============================] - 0s 104us/step - loss: 0.3434 - accuracy: 0.8817 - val_loss: 0.4833 - val_accuracy: 0.7849 Epoch 44/300 279/279 [==============================] - 0s 93us/step - loss: 0.3427 - accuracy: 0.8817 - val_loss: 0.4835 - val_accuracy: 0.7849 Epoch 45/300 279/279 [==============================] - 0s 90us/step - loss: 0.3418 - accuracy: 0.8817 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 46/300 279/279 [==============================] - 0s 93us/step - loss: 0.3411 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 47/300 279/279 [==============================] - 0s 97us/step - loss: 0.3403 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 00047: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 48/300 279/279 [==============================] - 0s 111us/step - loss: 0.3394 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 49/300 279/279 [==============================] - 0s 100us/step - loss: 0.3389 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 50/300 279/279 [==============================] - 0s 111us/step - loss: 0.3386 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 51/300 279/279 [==============================] - 0s 115us/step - loss: 0.3381 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 52/300 279/279 [==============================] - 0s 136us/step - loss: 0.3378 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 53/300 279/279 [==============================] - 0s 118us/step - loss: 0.3373 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 54/300 279/279 [==============================] - 0s 86us/step - loss: 0.3370 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 55/300 279/279 [==============================] - 0s 115us/step - loss: 0.3365 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 56/300 279/279 [==============================] - 0s 107us/step - loss: 0.3362 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 57/300 279/279 [==============================] - 0s 122us/step - loss: 0.3358 - accuracy: 0.8853 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 00057: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 58/300 279/279 [==============================] - 0s 104us/step - loss: 0.3353 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 59/300 279/279 [==============================] - 0s 107us/step - loss: 0.3351 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 60/300 279/279 [==============================] - 0s 104us/step - loss: 0.3349 - accuracy: 0.8853 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 61/300 279/279 [==============================] - 0s 104us/step - loss: 0.3347 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 62/300 279/279 [==============================] - 0s 100us/step - loss: 0.3345 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 63/300 279/279 [==============================] - 0s 79us/step - loss: 0.3343 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 64/300 279/279 [==============================] - 0s 90us/step - loss: 0.3341 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 65/300 279/279 [==============================] - 0s 93us/step - loss: 0.3339 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 66/300 279/279 [==============================] - 0s 104us/step - loss: 0.3337 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 67/300 279/279 [==============================] - 0s 79us/step - loss: 0.3335 - accuracy: 0.8889 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 00067: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 68/300 279/279 [==============================] - 0s 100us/step - loss: 0.3333 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 69/300 279/279 [==============================] - 0s 104us/step - loss: 0.3332 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 70/300 279/279 [==============================] - 0s 90us/step - loss: 0.3331 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 71/300 279/279 [==============================] - 0s 100us/step - loss: 0.3330 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 72/300 279/279 [==============================] - 0s 107us/step - loss: 0.3329 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 73/300 279/279 [==============================] - 0s 104us/step - loss: 0.3328 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 74/300 279/279 [==============================] - 0s 118us/step - loss: 0.3327 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 75/300 279/279 [==============================] - 0s 100us/step - loss: 0.3326 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 76/300 279/279 [==============================] - 0s 90us/step - loss: 0.3325 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 77/300 279/279 [==============================] - 0s 72us/step - loss: 0.3324 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00077: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 78/300 279/279 [==============================] - 0s 111us/step - loss: 0.3323 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 79/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 80/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 81/300 279/279 [==============================] - 0s 97us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 82/300 279/279 [==============================] - 0s 104us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 83/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 84/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 85/300 279/279 [==============================] - 0s 111us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 86/300 279/279 [==============================] - 0s 93us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 87/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00087: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. 
Epoch 88/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 89/300 279/279 [==============================] - 0s 104us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 90/300 279/279 [==============================] - 0s 100us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 91/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 92/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 93/300 279/279 [==============================] - 0s 93us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 94/300 279/279 [==============================] - 0s 100us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 95/300 279/279 [==============================] - 0s 90us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 96/300 279/279 [==============================] - 0s 97us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 97/300 279/279 [==============================] - 0s 100us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00097: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05. 
Epoch 98/300 279/279 [==============================] - 0s 97us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 99/300 279/279 [==============================] - ETA: 0s - loss: 0.3474 - accuracy: 0.81 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 100/300 279/279 [==============================] - ETA: 0s - loss: 0.2551 - accuracy: 0.93 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 101/300 279/279 [==============================] - 0s 107us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 102/300 279/279 [==============================] - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 103/300 279/279 [==============================] - 0s 86us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 104/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 105/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 106/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 107/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00107: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06. 
Epoch 108/300 279/279 [==============================] - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 109/300 279/279 [==============================] - ETA: 0s - loss: 0.2823 - accuracy: 0.90 - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 110/300 279/279 [==============================] - 0s 90us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 111/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 112/300 279/279 [==============================] - 0s 107us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 113/300 279/279 [==============================] - 0s 86us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 114/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 115/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 116/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 117/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00117: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06. 
Epoch 118/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 119/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 120/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 121/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 122/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 123/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 124/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 125/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 126/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 127/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00127: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06. 
Epoch 128/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 129/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 130/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 131/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 132/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 133/300 279/279 [==============================] - 0s 125us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 134/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 135/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 136/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 137/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00137: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07. 
Epoch 138/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 139/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 140/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 141/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 142/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 143/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 144/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 145/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 146/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 147/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00147: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07. 
Epoch 148/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 149/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 150/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 151/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 152/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 153/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 154/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 155/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 156/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 157/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00157: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07. 
Epoch 158/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 159/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 160/300 279/279 [==============================] - 0s 133us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 161/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 162/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 163/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 164/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 165/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 166/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 167/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00167: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08. 
Epoch 168/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 169/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 170/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 171/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 172/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 173/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 174/300 279/279 [==============================] - 0s 168us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 175/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 176/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 177/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00177: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08. 
Epoch 178/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 179/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 180/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 181/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 182/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 183/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 184/300 279/279 [==============================] - ETA: 0s - loss: 0.2749 - accuracy: 0.96 - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 185/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 186/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 187/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00187: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08. 
Epoch 188/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 189/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 190/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 191/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 192/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 193/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 194/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 195/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 196/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 197/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00197: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08. 
Epoch 198/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 199/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 200/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 201/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 202/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 203/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 204/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 205/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 206/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 207/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00207: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09. 
Epoch 208/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 209/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 210/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 211/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 212/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 213/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 214/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 215/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 216/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 217/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00217: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09. 
Epoch 218/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 219/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 220/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 221/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 222/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 223/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 224/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 225/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 226/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 227/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00227: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09. 
Epoch 228/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 229/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 230/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 231/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 232/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 233/300 279/279 [==============================] - 0s 122us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 234/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 235/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 236/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 237/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00237: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10. 
Epoch 238/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 239/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 240/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 241/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 242/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 243/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 244/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 245/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 246/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 247/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00247: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10. 
Epoch 248/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 249/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 250/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 251/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 252/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 253/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 254/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 255/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 256/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 257/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00257: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10. 
Epoch 258/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 259/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 260/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 261/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 262/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 263/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 264/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 265/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 266/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 267/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00267: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11. 
Epoch 268/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 269/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 270/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 271/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 272/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 273/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 274/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 275/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 276/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 277/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00277: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11. 
Epoch 278/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 279/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 280/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 281/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 282/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 283/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 284/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 285/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 286/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 287/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00287: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11. 
Epoch 288/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 289/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 290/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 291/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 292/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 293/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 294/300 279/279 [==============================] - 0s 118us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 295/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 296/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 297/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00297: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11. Epoch 298/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 299/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 300/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849
# Pull the per-epoch metrics Keras recorded during training.
hist = history.history
acc, val_acc = hist['accuracy'], hist['val_accuracy']
loss, val_loss = hist['loss'], hist['val_loss']
epochs = range(len(acc))
print(epochs)

# Accuracy curves: dots = training, solid line = validation.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.show()

# Loss curves, same styling.
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
range(0, 300)
# Final evaluation of the trained Keras model on the held-out test set.
test_loss, test_acc = model.evaluate(X_test, y_test)
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
93/93 [==============================] - 0s 54us/step test loss: 0.48277782432494626, test accuracy: 0.7849462628364563
# Binarize the sigmoid outputs at the conventional 0.5 threshold, then
# score chance-corrected agreement (kappa) and discrimination (ROC AUC).
y_pred = model.predict(X_test)
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
print("AUC ROC: ", roc_auc_score(y_test, y_pred))
0.34275618374558303
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 |
372 rows × 13 columns
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14,
# each fit with a fixed seed for reproducibility.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
[4836.0, 4367.616182778379, 3974.0733975762073, 3720.4477504020774, 3549.9823424876267, 3386.093163495573, 3254.3815559758523, 3136.90996141146, 3048.6934734702136, 2957.186911982338, 2855.333306370868, 2802.0220333671496, 2712.5965714921504, 2657.189981994876]
# Plot the elbow curve: look for the bend where adding clusters stops
# paying off in reduced inertia.
plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
[<matplotlib.lines.Line2D at 0x244598a8630>]
# Cluster count chosen from the elbow plot above.
K = 3
# Fix: use the named constant instead of repeating the literal 3, so the
# choice is made in exactly one place. n_init=10 k-means++ restarts;
# random_state pins the seed for reproducibility.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
# Cluster assignment (0..K-1) for each of the 372 rows of X.
kmeans_mfcc.labels_
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# predict() on the same data the model was fit on; the printed array below
# matches labels_ exactly.
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# Attach the cluster id and the binary target to the feature frame so the
# two can be cross-tabulated below. list(y) drops y's index so assignment
# is positional rather than index-aligned.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 | 1 | 0 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 | 1 | 0 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 | 0 | 0 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 | 0 | 0 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 | 0 | 0 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 | 0 | 0 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 | 0 | 0 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 | 1 | 0 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 | 0 | 0 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 | 1 | 0 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 | 0 | 0 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 | 1 | 0 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 | 0 | 0 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 | 0 | 0 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 | 0 | 0 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 | 0 | 0 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 | 1 | 0 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 | 0 | 0 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 | 0 | 0 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 | 1 | 0 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 | 0 | 0 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 | 0 | 0 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 | 0 | 0 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 | 1 | 0 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 | 1 | 0 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 | 1 | 0 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 | 1 | 0 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 | 1 | 0 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 | 1 | 0 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 | 1 | 1 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 | 1 | 1 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 | 1 | 1 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 | 0 | 1 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 | 0 | 1 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 | 0 | 1 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 | 1 | 1 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 | 1 | 1 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 | 1 | 1 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 | 1 | 1 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 | 1 | 1 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 | 0 | 1 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 | 1 | 1 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 | 1 | 1 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 | 1 | 1 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 | 1 | 1 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 | 1 | 1 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 | 1 | 1 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 | 1 | 1 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 | 1 | 1 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 | 1 | 1 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 | 2 | 1 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 | 2 | 1 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 | 1 | 1 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 | 1 | 1 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 | 1 | 1 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 | 1 | 1 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 | 1 | 1 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 | 1 | 1 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 | 0 | 1 |
372 rows × 15 columns
# Count rows per (chosen, cluster) pair, pivot to clusters x chosen, and
# draw a stacked bar chart showing the target composition of each cluster.
# (Renamed from `stacked` to avoid confusion with the stacked= keyword.)
counts = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = counts.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x2445d307358>
from IPython.display import display, Markdown, Latex

# Render a section heading for this company's analysis.
# NOTE(review): the heading uses companies[0] but the data below is taken
# from index 1 of df_n_ps / df_n_ps_std_mfcc — confirm both refer to the
# same company.
display(Markdown('## ' + companies[0]))

# Standardized MFCC features and the binary "chosen" target for this playlist.
X = df_n_ps_std_mfcc[1]
y = df_n_ps[1]['chosen']

# Default sklearn split: 75% train / 25% test.
X_train, X_test, y_train, y_test = train_test_split(X, y)
# Confirm the split: 279 training rows x 13 MFCC features.
X_train.shape
(279, 13)
# Base MLP estimator; the grid search below overrides these layer sizes.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate values for the hyperparameter grid search below.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [
    0.001, 0.002, 0.003, 0.004, 0.005, 0.006,
    0.007, 0.008, 0.009, 0.01, 0.02,
]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time

start = time.time()  # wall-clock start, seconds since the Unix epoch
np.random.seed(1234)  # fix the RNG so the search is reproducible

# Search space; batch_size is deliberately left out of this run.
parametros = {
    'activation': activation_vec,
    'max_iter': max_iter_vec,
    'hidden_layer_sizes': hidden_layer_sizes_vec,
    'learning_rate_init': learning_rate_init_vec,
    # 'batch_size': batch_size_vec,
}

# Track Cohen's kappa alongside accuracy; refit/best model is chosen by accuracy.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring,
                    refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)

# Report the winning configuration with its accuracy and kappa (both as %).
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))

end = time.time()  # wall-clock end, after the search finishes
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.006, 'max_iter': 300}, que permiten obtener un Accuracy de 82.08% y un Kappa del 43.49
Tiempo total: 29.12 minutos
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\neural_network\multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (300) reached and the optimization hasn't converged yet. % self.max_iter, ConvergenceWarning)
# Rebuild the grid-search winner in Keras: the same hidden layer widths with
# tanh activations, plus a single sigmoid unit for binary classification.
n0 = X_train.shape[1]  # number of input features

# Best hidden widths from GridSearchCV, with the output layer appended.
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]

lr = 0.006     # best learning_rate_init found by the grid search
epochs = 300   # matches the best max_iter

input_tensor = Input(shape=(n0,))
# Chain the hidden Dense layers; each one consumes the previous output.
hidden_outputs = [input_tensor]
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])

model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot of the freshly initialized weights
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_3 (Dense) (None, 20) 280 _________________________________________________________________ dense_4 (Dense) (None, 1) 21 ================================================================= Total params: 301 Trainable params: 301 Non-trainable params: 0 _________________________________________________________________
# Reset to the initial-weight snapshot so training starts from scratch.
model.set_weights(weights)

adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])

# Halve the learning rate whenever validation accuracy fails to improve by
# at least 0.01 over 10 epochs.
lr_schedule = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
history = model.fit(
    X_train, y_train,
    epochs=epochs,
    validation_data=(X_test, y_test),
    batch_size=32,
    callbacks=[lr_schedule],
)
Train on 279 samples, validate on 93 samples Epoch 1/300 279/279 [==============================] - 1s 3ms/step - loss: 0.7063 - accuracy: 0.5197 - val_loss: 0.6660 - val_accuracy: 0.5914 Epoch 2/300 279/279 [==============================] - 0s 61us/step - loss: 0.6003 - accuracy: 0.6918 - val_loss: 0.5741 - val_accuracy: 0.7419 Epoch 3/300 279/279 [==============================] - 0s 68us/step - loss: 0.5317 - accuracy: 0.7348 - val_loss: 0.5278 - val_accuracy: 0.7527 Epoch 4/300 279/279 [==============================] - 0s 72us/step - loss: 0.4996 - accuracy: 0.7634 - val_loss: 0.5053 - val_accuracy: 0.7742 Epoch 5/300 279/279 [==============================] - 0s 75us/step - loss: 0.4770 - accuracy: 0.7778 - val_loss: 0.4938 - val_accuracy: 0.8065 Epoch 6/300 279/279 [==============================] - 0s 90us/step - loss: 0.4639 - accuracy: 0.7778 - val_loss: 0.4845 - val_accuracy: 0.7957 Epoch 7/300 279/279 [==============================] - 0s 82us/step - loss: 0.4526 - accuracy: 0.7849 - val_loss: 0.4813 - val_accuracy: 0.8172 Epoch 8/300 279/279 [==============================] - 0s 75us/step - loss: 0.4466 - accuracy: 0.7921 - val_loss: 0.4822 - val_accuracy: 0.8065 Epoch 9/300 279/279 [==============================] - 0s 79us/step - loss: 0.4412 - accuracy: 0.7993 - val_loss: 0.4819 - val_accuracy: 0.8172 Epoch 10/300 279/279 [==============================] - 0s 82us/step - loss: 0.4373 - accuracy: 0.8065 - val_loss: 0.4840 - val_accuracy: 0.7957 Epoch 11/300 279/279 [==============================] - 0s 86us/step - loss: 0.4343 - accuracy: 0.8136 - val_loss: 0.4827 - val_accuracy: 0.7957 Epoch 12/300 279/279 [==============================] - 0s 75us/step - loss: 0.4328 - accuracy: 0.8100 - val_loss: 0.4866 - val_accuracy: 0.7849 Epoch 13/300 279/279 [==============================] - 0s 82us/step - loss: 0.4225 - accuracy: 0.8136 - val_loss: 0.4860 - val_accuracy: 0.7957 Epoch 14/300 279/279 [==============================] - 0s 75us/step - loss: 
0.4186 - accuracy: 0.8136 - val_loss: 0.4865 - val_accuracy: 0.7849 Epoch 15/300 279/279 [==============================] - 0s 82us/step - loss: 0.4118 - accuracy: 0.8136 - val_loss: 0.4846 - val_accuracy: 0.7849 Epoch 16/300 279/279 [==============================] - 0s 79us/step - loss: 0.4080 - accuracy: 0.8208 - val_loss: 0.4901 - val_accuracy: 0.7849 Epoch 17/300 279/279 [==============================] - 0s 107us/step - loss: 0.4009 - accuracy: 0.8351 - val_loss: 0.4878 - val_accuracy: 0.7742 Epoch 00017: ReduceLROnPlateau reducing learning rate to 0.003000000026077032. Epoch 18/300 279/279 [==============================] - 0s 107us/step - loss: 0.3950 - accuracy: 0.8387 - val_loss: 0.4864 - val_accuracy: 0.7742 Epoch 19/300 279/279 [==============================] - 0s 68us/step - loss: 0.3922 - accuracy: 0.8387 - val_loss: 0.4852 - val_accuracy: 0.7742 Epoch 20/300 279/279 [==============================] - 0s 86us/step - loss: 0.3886 - accuracy: 0.8530 - val_loss: 0.4807 - val_accuracy: 0.7849 Epoch 21/300 279/279 [==============================] - 0s 64us/step - loss: 0.3865 - accuracy: 0.8566 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 22/300 279/279 [==============================] - 0s 82us/step - loss: 0.3828 - accuracy: 0.8530 - val_loss: 0.4848 - val_accuracy: 0.7849 Epoch 23/300 279/279 [==============================] - 0s 79us/step - loss: 0.3800 - accuracy: 0.8566 - val_loss: 0.4855 - val_accuracy: 0.7849 Epoch 24/300 279/279 [==============================] - 0s 68us/step - loss: 0.3760 - accuracy: 0.8638 - val_loss: 0.4818 - val_accuracy: 0.7849 Epoch 25/300 279/279 [==============================] - 0s 79us/step - loss: 0.3744 - accuracy: 0.8602 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 26/300 279/279 [==============================] - 0s 86us/step - loss: 0.3705 - accuracy: 0.8710 - val_loss: 0.4781 - val_accuracy: 0.7849 Epoch 27/300 279/279 [==============================] - 0s 86us/step - loss: 0.3666 - accuracy: 0.8746 - 
val_loss: 0.4785 - val_accuracy: 0.7849 Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. Epoch 28/300 279/279 [==============================] - 0s 79us/step - loss: 0.3637 - accuracy: 0.8746 - val_loss: 0.4799 - val_accuracy: 0.7849 Epoch 29/300 279/279 [==============================] - 0s 75us/step - loss: 0.3620 - accuracy: 0.8746 - val_loss: 0.4816 - val_accuracy: 0.7849 Epoch 30/300 279/279 [==============================] - 0s 79us/step - loss: 0.3604 - accuracy: 0.8746 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 31/300 279/279 [==============================] - 0s 75us/step - loss: 0.3596 - accuracy: 0.8746 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 32/300 279/279 [==============================] - 0s 79us/step - loss: 0.3572 - accuracy: 0.8781 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 33/300 279/279 [==============================] - 0s 79us/step - loss: 0.3555 - accuracy: 0.8781 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 34/300 279/279 [==============================] - 0s 79us/step - loss: 0.3540 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 35/300 279/279 [==============================] - 0s 82us/step - loss: 0.3527 - accuracy: 0.8817 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 36/300 279/279 [==============================] - 0s 75us/step - loss: 0.3510 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 37/300 279/279 [==============================] - 0s 104us/step - loss: 0.3493 - accuracy: 0.8781 - val_loss: 0.4836 - val_accuracy: 0.7849 Epoch 00037: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 38/300 279/279 [==============================] - 0s 86us/step - loss: 0.3476 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 39/300 279/279 [==============================] - 0s 82us/step - loss: 0.3469 - accuracy: 0.8817 - val_loss: 0.4823 - val_accuracy: 0.7849 Epoch 40/300 279/279 [==============================] - 0s 75us/step - loss: 0.3457 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 41/300 279/279 [==============================] - 0s 82us/step - loss: 0.3449 - accuracy: 0.8817 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 42/300 279/279 [==============================] - 0s 90us/step - loss: 0.3443 - accuracy: 0.8817 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 43/300 279/279 [==============================] - 0s 104us/step - loss: 0.3434 - accuracy: 0.8817 - val_loss: 0.4833 - val_accuracy: 0.7849 Epoch 44/300 279/279 [==============================] - 0s 93us/step - loss: 0.3427 - accuracy: 0.8817 - val_loss: 0.4835 - val_accuracy: 0.7849 Epoch 45/300 279/279 [==============================] - 0s 90us/step - loss: 0.3418 - accuracy: 0.8817 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 46/300 279/279 [==============================] - 0s 93us/step - loss: 0.3411 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 47/300 279/279 [==============================] - 0s 97us/step - loss: 0.3403 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 00047: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 48/300 279/279 [==============================] - 0s 111us/step - loss: 0.3394 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 49/300 279/279 [==============================] - 0s 100us/step - loss: 0.3389 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 50/300 279/279 [==============================] - 0s 111us/step - loss: 0.3386 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 51/300 279/279 [==============================] - 0s 115us/step - loss: 0.3381 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 52/300 279/279 [==============================] - 0s 136us/step - loss: 0.3378 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 53/300 279/279 [==============================] - 0s 118us/step - loss: 0.3373 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 54/300 279/279 [==============================] - 0s 86us/step - loss: 0.3370 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 55/300 279/279 [==============================] - 0s 115us/step - loss: 0.3365 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 56/300 279/279 [==============================] - 0s 107us/step - loss: 0.3362 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 57/300 279/279 [==============================] - 0s 122us/step - loss: 0.3358 - accuracy: 0.8853 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 00057: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 58/300 279/279 [==============================] - 0s 104us/step - loss: 0.3353 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 59/300 279/279 [==============================] - 0s 107us/step - loss: 0.3351 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 60/300 279/279 [==============================] - 0s 104us/step - loss: 0.3349 - accuracy: 0.8853 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 61/300 279/279 [==============================] - 0s 104us/step - loss: 0.3347 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 62/300 279/279 [==============================] - 0s 100us/step - loss: 0.3345 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 63/300 279/279 [==============================] - 0s 79us/step - loss: 0.3343 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 64/300 279/279 [==============================] - 0s 90us/step - loss: 0.3341 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 65/300 279/279 [==============================] - 0s 93us/step - loss: 0.3339 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 66/300 279/279 [==============================] - 0s 104us/step - loss: 0.3337 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 67/300 279/279 [==============================] - 0s 79us/step - loss: 0.3335 - accuracy: 0.8889 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 00067: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 68/300 279/279 [==============================] - 0s 100us/step - loss: 0.3333 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 69/300 279/279 [==============================] - 0s 104us/step - loss: 0.3332 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 70/300 279/279 [==============================] - 0s 90us/step - loss: 0.3331 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 71/300 279/279 [==============================] - 0s 100us/step - loss: 0.3330 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 72/300 279/279 [==============================] - 0s 107us/step - loss: 0.3329 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 73/300 279/279 [==============================] - 0s 104us/step - loss: 0.3328 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 74/300 279/279 [==============================] - 0s 118us/step - loss: 0.3327 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 75/300 279/279 [==============================] - 0s 100us/step - loss: 0.3326 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 76/300 279/279 [==============================] - 0s 90us/step - loss: 0.3325 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 77/300 279/279 [==============================] - 0s 72us/step - loss: 0.3324 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00077: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 78/300 279/279 [==============================] - 0s 111us/step - loss: 0.3323 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 79/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 80/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 81/300 279/279 [==============================] - 0s 97us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 82/300 279/279 [==============================] - 0s 104us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 83/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 84/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 85/300 279/279 [==============================] - 0s 111us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 86/300 279/279 [==============================] - 0s 93us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 87/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00087: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. 
Epoch 88/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 89/300 279/279 [==============================] - 0s 104us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 90/300 279/279 [==============================] - 0s 100us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 91/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 92/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 93/300 279/279 [==============================] - 0s 93us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 94/300 279/279 [==============================] - 0s 100us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 95/300 279/279 [==============================] - 0s 90us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 96/300 279/279 [==============================] - 0s 97us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 97/300 279/279 [==============================] - 0s 100us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00097: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05. 
Epoch 98/300 279/279 [==============================] - 0s 97us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 99/300 279/279 [==============================] - ETA: 0s - loss: 0.3474 - accuracy: 0.81 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 100/300 279/279 [==============================] - ETA: 0s - loss: 0.2551 - accuracy: 0.93 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 101/300 279/279 [==============================] - 0s 107us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 102/300 279/279 [==============================] - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 103/300 279/279 [==============================] - 0s 86us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 104/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 105/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 106/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 107/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00107: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06. 
Epoch 108/300 279/279 [==============================] - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 109/300 279/279 [==============================] - ETA: 0s - loss: 0.2823 - accuracy: 0.90 - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 110/300 279/279 [==============================] - 0s 90us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 111/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 112/300 279/279 [==============================] - 0s 107us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 113/300 279/279 [==============================] - 0s 86us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 114/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 115/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 116/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 117/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00117: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06. 
Epoch 118/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 119/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 120/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 121/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 122/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 123/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 124/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 125/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 126/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 127/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00127: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06. 
Epoch 128/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 129/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 130/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 131/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 132/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 133/300 279/279 [==============================] - 0s 125us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 134/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 135/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 136/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 137/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00137: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07. 
Epoch 138/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 139/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 140/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 141/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 142/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 143/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 144/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 145/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 146/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 147/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00147: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07. 
Epoch 148/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 149/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 150/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 151/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 152/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 153/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 154/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 155/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 156/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 157/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00157: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07. 
Epoch 158/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 159/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 160/300 279/279 [==============================] - 0s 133us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 161/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 162/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 163/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 164/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 165/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 166/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 167/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00167: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08. 
Epoch 168/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 169/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 170/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 171/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 172/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 173/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 174/300 279/279 [==============================] - 0s 168us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 175/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 176/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 177/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00177: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08. 
Epoch 178/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 179/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 180/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 181/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 182/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 183/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 184/300 279/279 [==============================] - ETA: 0s - loss: 0.2749 - accuracy: 0.96 - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 185/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 186/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 187/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00187: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08. 
Epoch 188/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 189/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 190/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 191/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 192/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 193/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 194/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 195/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 196/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 197/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00197: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08. 
Epoch 198/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 199/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 200/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 201/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 202/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 203/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 204/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 205/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 206/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 207/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00207: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09. 
Epoch 208/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 209/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 210/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 211/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 212/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 213/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 214/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 215/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 216/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 217/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00217: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09. 
Epoch 218/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 219/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 220/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 221/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 222/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 223/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 224/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 225/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 226/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 227/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00227: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09. 
Epoch 228/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 229/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 230/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 231/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 232/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 233/300 279/279 [==============================] - 0s 122us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 234/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 235/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 236/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 237/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00237: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10. 
Epoch 238/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 239/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 240/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 241/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 242/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 243/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 244/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 245/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 246/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 247/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00247: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10. 
Epoch 248/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 249/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 250/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 251/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 252/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 253/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 254/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 255/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 256/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 257/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00257: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10. 
Epoch 258/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 259/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 260/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 261/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 262/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 263/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 264/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 265/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 266/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 267/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00267: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11. 
Epoch 268/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 269/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 270/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 271/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 272/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 273/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 274/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 275/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 276/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 277/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00277: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11. 
Epoch 278/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 279/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 280/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 281/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 282/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 283/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 284/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 285/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 286/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 287/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00287: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11. 
Epoch 288/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 289/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 290/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 291/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 292/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 293/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 294/300 279/279 [==============================] - 0s 118us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 295/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 296/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 297/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00297: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11. Epoch 298/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 299/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 300/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849
# Visualize the Keras training history: accuracy and loss curves,
# training vs. validation, one point per epoch.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))  # x-axis: epoch indices 0..n-1

# Accuracy: dots for training, solid line for validation.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.show()

# Loss: same convention as above.
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
range(0, 300)
# Evaluate the trained network on the held-out test split and report both metrics.
test_loss, test_acc = model.evaluate(X_test, y_test)
print(f"test loss: {test_loss}, test accuracy: {test_acc}")
93/93 [==============================] - 0s 54us/step test loss: 0.48277782432494626, test accuracy: 0.7849462628364563
# Score the raw predicted probabilities with ROC AUC, then binarize
# at the 0.5 threshold to compute Cohen's Kappa.
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
y_pred = [int(p >= 0.5) for p in y_pred]  # threshold probabilities -> hard 0/1 labels
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
# Display the standardized MFCC feature matrix (372 rows x 13 columns).
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 |
372 rows × 13 columns
# Elbow method: collect the within-cluster sum of squares (KMeans inertia)
# for every candidate cluster count k = 1..14.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
[4836.0, 4367.616182778379, 3974.0733975762073, 3720.4477504020774, 3549.9823424876267, 3386.093163495573, 3254.3815559758523, 3136.90996141146, 3048.6934734702136, 2957.186911982338, 2855.333306370868, 2802.0220333671496, 2712.5965714921504, 2657.189981994876]
# Plot the elbow curve (k vs. WSS) to pick the cluster count visually.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
[<matplotlib.lines.Line2D at 0x244598a8630>]
# Final clustering with the elbow-selected number of clusters.
K=3
# Use the K constant instead of a duplicated hard-coded literal so the
# cluster count is changed in exactly one place.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
# Cluster assignment (0..K-1) for each row of the fitted data.
kmeans_mfcc.labels_
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# predict() on the same data used for fitting reproduces labels_
# (compare with the array displayed above).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# Append the cluster id and the binary target to X so cluster composition
# can be cross-tabulated against the 'chosen' label below.
X.loc[:,'Cluster'] = clusters_mfcc
# list(y) drops y's index so values are assigned positionally — presumably
# X and y share row order; TODO confirm upstream.
X.loc[:,'chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 | 1 | 0 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 | 1 | 0 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 | 0 | 0 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 | 0 | 0 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 | 0 | 0 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 | 0 | 0 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 | 0 | 0 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 | 1 | 0 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 | 0 | 0 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 | 1 | 0 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 | 0 | 0 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 | 1 | 0 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 | 0 | 0 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 | 0 | 0 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 | 0 | 0 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 | 0 | 0 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 | 1 | 0 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 | 0 | 0 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 | 0 | 0 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 | 1 | 0 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 | 0 | 0 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 | 0 | 0 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 | 0 | 0 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 | 1 | 0 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 | 1 | 0 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 | 1 | 0 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 | 1 | 0 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 | 1 | 0 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 | 1 | 0 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 | 1 | 1 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 | 1 | 1 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 | 1 | 1 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 | 0 | 1 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 | 0 | 1 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 | 0 | 1 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 | 1 | 1 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 | 1 | 1 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 | 1 | 1 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 | 1 | 1 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 | 1 | 1 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 | 0 | 1 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 | 1 | 1 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 | 1 | 1 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 | 1 | 1 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 | 1 | 1 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 | 1 | 1 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 | 1 | 1 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 | 1 | 1 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 | 1 | 1 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 | 1 | 1 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 | 2 | 1 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 | 2 | 1 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 | 1 | 1 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 | 1 | 1 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 | 1 | 1 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 | 1 | 1 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 | 1 | 1 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 | 1 | 1 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 | 0 | 1 |
372 rows × 15 columns
# Count rows per (chosen, Cluster) pair, reshape to Cluster x chosen,
# and draw a stacked bar chart showing class composition per cluster.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
# Column 0 holds the group sizes produced by .size().reset_index().
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
# Select both class columns explicitly (0 = not chosen, 1 = chosen).
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
<matplotlib.axes._subplots.AxesSubplot at 0x2445d307358>
from IPython.display import display, Markdown, Latex

# Render the company name as a level-2 Markdown heading in the notebook.
display(Markdown(f'## {companies[1]}'))

# Features: the standardized MFCC table for company 1, minus the cluster label
# (assumes df_n_ps_std_mfcc[1] carries a 'Cluster' column -- see earlier cells).
X = df_n_ps_std_mfcc[1].drop(columns='Cluster')
# Binary target: whether each track was chosen for the playlist.
y = df_n_ps[1]['chosen']
# Hold out a test set. A fixed random_state makes the partition -- and thus
# every accuracy/kappa figure reported below -- reproducible across notebook
# runs; without it each execution trained and evaluated on a different split.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
X_train.shape
(191, 13)
# Base estimator; this hidden_layer_sizes is only a placeholder -- the grid
# search below overrides it with hidden_layer_sizes_vec candidates.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate values for the grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
# Architectures: one to three hidden layers of 10-30 units each.
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Batch-size candidates (defined but excluded from the grid below).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time

start = time.time()  # wall-clock start, to report total tuning time at the end
np.random.seed(1234)  # seed NumPy's global RNG so the search is reproducible

# Search space; batch_size is deliberately left commented out to keep the
# run tractable (the full grid already takes ~25 minutes).
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
              }
# Score with both Cohen's kappa and accuracy; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): dropped the deprecated iid=True argument -- it was deprecated
# in scikit-learn 0.22 and removed in 0.24, where passing it raises TypeError.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # wall-clock end
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20, 20), 'learning_rate_init': 0.01, 'max_iter': 2000}, que permiten obtener un Accuracy de 81.68% y un Kappa del 52.97
Tiempo total: 25.07 minutos
n0 = X_train.shape[1]  # number of input features (13 MFCC columns)
### hidden_layer_sizes
# Replicate the best MLP found by the grid search as a Keras model: copy the
# tuned layer sizes and append a single sigmoid output unit for the binary
# 'chosen' target.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)  # output layer
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
# FIX(review): the hidden activation was hard-coded to 'tanh' although the
# grid search selected it ('relu' in the reported best params); use the tuned
# value so the Keras network actually mirrors the best sklearn model.
activation = grid.best_params_['activation']
input_tensor = Input(shape = (n0,))
hidden_outputs = [input_tensor]
# Chain the hidden Dense layers, each fed by the previous layer's output.
for i in range (len(ns)-1):
    hidden_outputs.append(Dense(ns[i], activation = activation)(hidden_outputs[i]))
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly initialized weights so training can be reset later
# (restored via model.set_weights(weights) before fitting).
weights = model.get_weights()
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_4 (Dense) (None, 20) 280 _________________________________________________________________ dense_5 (Dense) (None, 20) 420 _________________________________________________________________ dense_6 (Dense) (None, 20) 420 _________________________________________________________________ dense_7 (Dense) (None, 1) 21 ================================================================= Total params: 1,141 Trainable params: 1,141 Non-trainable params: 0 _________________________________________________________________
# Restore the snapshot of the initial weights so re-running this cell always
# trains from the same starting point (fit would otherwise continue training).
model.set_weights(weights)
# FIX(review): 'lr' is the deprecated argument name for Keras optimizers;
# 'learning_rate' is the current one (same value, same behavior).
adam = keras.optimizers.Adam(learning_rate=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train with the tuned epoch budget; halve the learning rate whenever
# validation accuracy fails to improve by at least 0.01 for 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                    )
Train on 191 samples, validate on 64 samples Epoch 1/2000 191/191 [==============================] - 2s 10ms/step - loss: 0.6523 - accuracy: 0.5864 - val_loss: 0.5471 - val_accuracy: 0.7031 Epoch 2/2000 191/191 [==============================] - 0s 89us/step - loss: 0.4800 - accuracy: 0.7696 - val_loss: 0.5294 - val_accuracy: 0.7500 Epoch 3/2000 191/191 [==============================] - 0s 68us/step - loss: 0.4421 - accuracy: 0.8010 - val_loss: 0.5305 - val_accuracy: 0.7812 Epoch 4/2000 191/191 [==============================] - 0s 63us/step - loss: 0.4243 - accuracy: 0.8168 - val_loss: 0.5390 - val_accuracy: 0.7500 Epoch 5/2000 191/191 [==============================] - 0s 73us/step - loss: 0.4080 - accuracy: 0.8168 - val_loss: 0.5368 - val_accuracy: 0.7500 Epoch 6/2000 191/191 [==============================] - 0s 58us/step - loss: 0.3924 - accuracy: 0.8168 - val_loss: 0.5567 - val_accuracy: 0.7188 Epoch 7/2000 191/191 [==============================] - 0s 89us/step - loss: 0.3782 - accuracy: 0.8168 - val_loss: 0.5325 - val_accuracy: 0.7500 Epoch 8/2000 191/191 [==============================] - 0s 63us/step - loss: 0.3562 - accuracy: 0.8429 - val_loss: 0.5605 - val_accuracy: 0.7500 Epoch 9/2000 191/191 [==============================] - 0s 63us/step - loss: 0.3462 - accuracy: 0.8586 - val_loss: 0.5968 - val_accuracy: 0.7188 Epoch 10/2000 191/191 [==============================] - 0s 89us/step - loss: 0.3171 - accuracy: 0.8639 - val_loss: 0.5665 - val_accuracy: 0.7500 Epoch 11/2000 191/191 [==============================] - 0s 68us/step - loss: 0.3107 - accuracy: 0.8639 - val_loss: 0.5456 - val_accuracy: 0.7656 Epoch 12/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2855 - accuracy: 0.8743 - val_loss: 0.6107 - val_accuracy: 0.7500 Epoch 13/2000 191/191 [==============================] - 0s 58us/step - loss: 0.2626 - accuracy: 0.8848 - val_loss: 0.6076 - val_accuracy: 0.7656 Epoch 00013: ReduceLROnPlateau reducing learning rate to 
0.004999999888241291. Epoch 14/2000 191/191 [==============================] - 0s 94us/step - loss: 0.2396 - accuracy: 0.9215 - val_loss: 0.5932 - val_accuracy: 0.7656 Epoch 15/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2269 - accuracy: 0.9162 - val_loss: 0.5980 - val_accuracy: 0.7812 Epoch 16/2000 191/191 [==============================] - 0s 63us/step - loss: 0.2133 - accuracy: 0.9372 - val_loss: 0.6289 - val_accuracy: 0.7656 Epoch 17/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2023 - accuracy: 0.9372 - val_loss: 0.6384 - val_accuracy: 0.7656 Epoch 18/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1904 - accuracy: 0.9424 - val_loss: 0.6197 - val_accuracy: 0.7656 Epoch 19/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1817 - accuracy: 0.9372 - val_loss: 0.6255 - val_accuracy: 0.7656 Epoch 20/2000 191/191 [==============================] - 0s 68us/step - loss: 0.1725 - accuracy: 0.9424 - val_loss: 0.6415 - val_accuracy: 0.7500 Epoch 21/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1603 - accuracy: 0.9424 - val_loss: 0.6400 - val_accuracy: 0.7656 Epoch 22/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1473 - accuracy: 0.9581 - val_loss: 0.6420 - val_accuracy: 0.7500 Epoch 23/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1367 - accuracy: 0.9738 - val_loss: 0.6448 - val_accuracy: 0.7656 Epoch 00023: ReduceLROnPlateau reducing learning rate to 0.0024999999441206455. 
Epoch 24/2000 191/191 [==============================] - 0s 68us/step - loss: 0.1248 - accuracy: 0.9791 - val_loss: 0.6578 - val_accuracy: 0.7812 Epoch 25/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1211 - accuracy: 0.9738 - val_loss: 0.6633 - val_accuracy: 0.7812 Epoch 26/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1166 - accuracy: 0.9791 - val_loss: 0.6651 - val_accuracy: 0.7656 Epoch 27/2000 191/191 [==============================] - 0s 110us/step - loss: 0.1121 - accuracy: 0.9791 - val_loss: 0.6655 - val_accuracy: 0.7500 Epoch 28/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1069 - accuracy: 0.9791 - val_loss: 0.6765 - val_accuracy: 0.7656 Epoch 29/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1026 - accuracy: 0.9791 - val_loss: 0.6865 - val_accuracy: 0.7656 Epoch 30/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0981 - accuracy: 0.9843 - val_loss: 0.6846 - val_accuracy: 0.7656 Epoch 31/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0922 - accuracy: 0.9843 - val_loss: 0.6932 - val_accuracy: 0.7656 Epoch 32/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0883 - accuracy: 0.9843 - val_loss: 0.7066 - val_accuracy: 0.7656 Epoch 33/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0829 - accuracy: 0.9895 - val_loss: 0.7111 - val_accuracy: 0.7656 Epoch 00033: ReduceLROnPlateau reducing learning rate to 0.0012499999720603228. 
Epoch 34/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0790 - accuracy: 0.9895 - val_loss: 0.7178 - val_accuracy: 0.7656 Epoch 35/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0767 - accuracy: 0.9895 - val_loss: 0.7203 - val_accuracy: 0.7656 Epoch 36/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0752 - accuracy: 0.9895 - val_loss: 0.7267 - val_accuracy: 0.7656 Epoch 37/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0729 - accuracy: 0.9895 - val_loss: 0.7269 - val_accuracy: 0.7656 Epoch 38/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0705 - accuracy: 0.9895 - val_loss: 0.7364 - val_accuracy: 0.7656 Epoch 39/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0686 - accuracy: 0.9895 - val_loss: 0.7434 - val_accuracy: 0.7656 Epoch 40/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0668 - accuracy: 0.9895 - val_loss: 0.7461 - val_accuracy: 0.7812 Epoch 41/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0648 - accuracy: 0.9895 - val_loss: 0.7476 - val_accuracy: 0.7812 Epoch 42/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0631 - accuracy: 0.9895 - val_loss: 0.7577 - val_accuracy: 0.7812 Epoch 43/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0610 - accuracy: 0.9895 - val_loss: 0.7680 - val_accuracy: 0.7812 Epoch 00043: ReduceLROnPlateau reducing learning rate to 0.0006249999860301614. 
Epoch 44/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0592 - accuracy: 0.9895 - val_loss: 0.7672 - val_accuracy: 0.7812 Epoch 45/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0582 - accuracy: 0.9895 - val_loss: 0.7674 - val_accuracy: 0.7812 Epoch 46/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0576 - accuracy: 0.9895 - val_loss: 0.7688 - val_accuracy: 0.7812 Epoch 47/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0566 - accuracy: 0.9895 - val_loss: 0.7700 - val_accuracy: 0.7812 Epoch 48/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0557 - accuracy: 0.9895 - val_loss: 0.7748 - val_accuracy: 0.7812 Epoch 49/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0547 - accuracy: 0.9895 - val_loss: 0.7777 - val_accuracy: 0.7812 Epoch 50/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0540 - accuracy: 0.9948 - val_loss: 0.7821 - val_accuracy: 0.7812 Epoch 51/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0533 - accuracy: 0.9948 - val_loss: 0.7849 - val_accuracy: 0.7812 Epoch 52/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0525 - accuracy: 0.9948 - val_loss: 0.7842 - val_accuracy: 0.7812 Epoch 53/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0515 - accuracy: 0.9948 - val_loss: 0.7869 - val_accuracy: 0.7812 Epoch 00053: ReduceLROnPlateau reducing learning rate to 0.0003124999930150807. 
Epoch 54/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0509 - accuracy: 0.9948 - val_loss: 0.7898 - val_accuracy: 0.7812 Epoch 55/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0504 - accuracy: 0.9948 - val_loss: 0.7899 - val_accuracy: 0.7812 Epoch 56/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0500 - accuracy: 0.9948 - val_loss: 0.7926 - val_accuracy: 0.7812 Epoch 57/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0496 - accuracy: 0.9948 - val_loss: 0.7942 - val_accuracy: 0.7812 Epoch 58/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0492 - accuracy: 0.9948 - val_loss: 0.7951 - val_accuracy: 0.7812 Epoch 59/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0487 - accuracy: 0.9948 - val_loss: 0.7963 - val_accuracy: 0.7812 Epoch 60/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0484 - accuracy: 0.9948 - val_loss: 0.7975 - val_accuracy: 0.7812 Epoch 61/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0480 - accuracy: 0.9948 - val_loss: 0.7996 - val_accuracy: 0.7812 Epoch 62/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0476 - accuracy: 0.9948 - val_loss: 0.8015 - val_accuracy: 0.7812 Epoch 63/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0472 - accuracy: 0.9948 - val_loss: 0.8032 - val_accuracy: 0.7656 Epoch 00063: ReduceLROnPlateau reducing learning rate to 0.00015624999650754035. 
Epoch 64/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0468 - accuracy: 0.9948 - val_loss: 0.8047 - val_accuracy: 0.7656 Epoch 65/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0466 - accuracy: 0.9948 - val_loss: 0.8048 - val_accuracy: 0.7656 Epoch 66/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0464 - accuracy: 0.9948 - val_loss: 0.8054 - val_accuracy: 0.7656 Epoch 67/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0462 - accuracy: 0.9948 - val_loss: 0.8063 - val_accuracy: 0.7656 Epoch 68/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0461 - accuracy: 0.9948 - val_loss: 0.8067 - val_accuracy: 0.7656 Epoch 69/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0459 - accuracy: 0.9948 - val_loss: 0.8076 - val_accuracy: 0.7656 Epoch 70/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0456 - accuracy: 0.9948 - val_loss: 0.8089 - val_accuracy: 0.7656 Epoch 71/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0455 - accuracy: 0.9948 - val_loss: 0.8099 - val_accuracy: 0.7656 Epoch 72/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0452 - accuracy: 0.9948 - val_loss: 0.8107 - val_accuracy: 0.7656 Epoch 73/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0451 - accuracy: 0.9948 - val_loss: 0.8106 - val_accuracy: 0.7656 Epoch 00073: ReduceLROnPlateau reducing learning rate to 7.812499825377017e-05. 
Epoch 74/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0449 - accuracy: 0.9948 - val_loss: 0.8107 - val_accuracy: 0.7656 Epoch 75/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0448 - accuracy: 0.9948 - val_loss: 0.8113 - val_accuracy: 0.7656 Epoch 76/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0447 - accuracy: 0.9948 - val_loss: 0.8118 - val_accuracy: 0.7656 Epoch 77/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0446 - accuracy: 0.9948 - val_loss: 0.8122 - val_accuracy: 0.7656 Epoch 78/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0445 - accuracy: 0.9948 - val_loss: 0.8128 - val_accuracy: 0.7656 Epoch 79/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0444 - accuracy: 0.9948 - val_loss: 0.8131 - val_accuracy: 0.7656 Epoch 80/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0443 - accuracy: 0.9948 - val_loss: 0.8137 - val_accuracy: 0.7656 Epoch 81/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0442 - accuracy: 0.9948 - val_loss: 0.8142 - val_accuracy: 0.7656 Epoch 82/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0441 - accuracy: 0.9948 - val_loss: 0.8146 - val_accuracy: 0.7656 Epoch 83/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0440 - accuracy: 0.9948 - val_loss: 0.8150 - val_accuracy: 0.7656 Epoch 00083: ReduceLROnPlateau reducing learning rate to 3.9062499126885086e-05. 
Epoch 84/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0439 - accuracy: 0.9948 - val_loss: 0.8152 - val_accuracy: 0.7656 Epoch 85/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0439 - accuracy: 0.9948 - val_loss: 0.8155 - val_accuracy: 0.7656 Epoch 86/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0438 - accuracy: 0.9948 - val_loss: 0.8155 - val_accuracy: 0.7656 Epoch 87/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0438 - accuracy: 0.9948 - val_loss: 0.8159 - val_accuracy: 0.7656 Epoch 88/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0437 - accuracy: 0.9948 - val_loss: 0.8161 - val_accuracy: 0.7656 Epoch 89/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0437 - accuracy: 0.9948 - val_loss: 0.8162 - val_accuracy: 0.7656 Epoch 90/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0436 - accuracy: 0.9948 - val_loss: 0.8164 - val_accuracy: 0.7656 Epoch 91/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0436 - accuracy: 0.9948 - val_loss: 0.8166 - val_accuracy: 0.7656 Epoch 92/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0435 - accuracy: 0.9948 - val_loss: 0.8170 - val_accuracy: 0.7656 Epoch 93/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0435 - accuracy: 0.9948 - val_loss: 0.8171 - val_accuracy: 0.7656 Epoch 00093: ReduceLROnPlateau reducing learning rate to 1.9531249563442543e-05. 
Epoch 94/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8172 - val_accuracy: 0.7656 Epoch 95/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8174 - val_accuracy: 0.7656 Epoch 96/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8175 - val_accuracy: 0.7656 Epoch 97/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8174 - val_accuracy: 0.7656 Epoch 98/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8176 - val_accuracy: 0.7656 Epoch 99/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8177 - val_accuracy: 0.7656 Epoch 100/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8179 - val_accuracy: 0.7656 Epoch 101/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8180 - val_accuracy: 0.7656 Epoch 102/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8180 - val_accuracy: 0.7656 Epoch 103/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8182 - val_accuracy: 0.7656 Epoch 00103: ReduceLROnPlateau reducing learning rate to 9.765624781721272e-06. 
Epoch 104/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8182 - val_accuracy: 0.7656 Epoch 105/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8183 - val_accuracy: 0.7656 Epoch 106/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8183 - val_accuracy: 0.7656 Epoch 107/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8184 - val_accuracy: 0.7656 Epoch 108/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8185 - val_accuracy: 0.7656 Epoch 109/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8185 - val_accuracy: 0.7656 Epoch 110/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8186 - val_accuracy: 0.7656 Epoch 111/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8186 - val_accuracy: 0.7656 Epoch 112/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8187 - val_accuracy: 0.7656 Epoch 113/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8187 - val_accuracy: 0.7656 Epoch 00113: ReduceLROnPlateau reducing learning rate to 4.882812390860636e-06. 
Epoch 114/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 115/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 116/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 117/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 118/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 119/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 120/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 121/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 122/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 123/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 00123: ReduceLROnPlateau reducing learning rate to 2.441406195430318e-06. 
Epoch 124/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 125/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 126/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 127/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 128/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 129/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 130/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 131/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 132/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 133/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 00133: ReduceLROnPlateau reducing learning rate to 1.220703097715159e-06. 
Epoch 134/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 135/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 136/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 137/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 138/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 139/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 140/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 141/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 142/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 143/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 00143: ReduceLROnPlateau reducing learning rate to 6.103515488575795e-07. 
Epoch 144/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 145/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 146/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 147/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 148/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 149/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 150/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 151/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 152/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 153/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00153: ReduceLROnPlateau reducing learning rate to 3.0517577442878974e-07. 
Epoch 154/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 155/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 156/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 157/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 158/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 159/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 160/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 161/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 162/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 163/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00163: ReduceLROnPlateau reducing learning rate to 1.5258788721439487e-07. 
Epoch 164/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 165/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 166/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 167/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 168/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 169/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 170/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 171/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 172/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 173/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00173: ReduceLROnPlateau reducing learning rate to 7.629394360719743e-08. 
Epoch 174/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 175/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 176/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 177/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 178/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 179/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 180/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 181/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 182/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 183/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00183: ReduceLROnPlateau reducing learning rate to 3.814697180359872e-08. 
Epoch 184/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 185/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 186/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 187/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 188/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 189/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 190/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 191/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 192/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 193/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00193: ReduceLROnPlateau reducing learning rate to 1.907348590179936e-08. 
Epoch 194/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 195/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 196/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 197/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 198/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 199/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 200/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 201/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 202/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 203/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00203: ReduceLROnPlateau reducing learning rate to 9.53674295089968e-09. 
Epoch 204/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 205/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 206/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 207/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 208/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 209/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 210/2000 191/191 [==============================] - ETA: 0s - loss: 0.0166 - accuracy: 1.00 - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 211/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 212/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 213/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00213: ReduceLROnPlateau reducing learning rate to 4.76837147544984e-09. 
Epoch 214/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 215/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 216/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 217/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 218/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 219/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 220/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 221/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 222/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 223/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00223: ReduceLROnPlateau reducing learning rate to 2.38418573772492e-09. 
Epoch 224/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 225/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 226/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 227/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 228/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 229/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 230/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 231/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 232/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 233/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00233: ReduceLROnPlateau reducing learning rate to 1.19209286886246e-09. 
Epoch 234/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 235/2000 191/191 [==============================] - ETA: 0s - loss: 0.0224 - accuracy: 1.00 - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 236/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 237/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 238/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 239/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 240/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 241/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 242/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 243/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00243: ReduceLROnPlateau reducing learning rate to 5.9604643443123e-10. 
Epoch 244/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 245/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 246/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 247/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 248/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 249/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 250/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 251/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 252/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 253/2000 191/191 [==============================] - ETA: 0s - loss: 0.0256 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00253: ReduceLROnPlateau reducing learning rate to 2.98023217215615e-10. 
Epoch 254/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 255/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 256/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 257/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 258/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 259/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 260/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 261/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 262/2000 191/191 [==============================] - ETA: 0s - loss: 0.0223 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 263/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00263: ReduceLROnPlateau reducing learning rate to 1.490116086078075e-10. 
Epoch 264/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 265/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 266/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 267/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 268/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 269/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 270/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 271/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 272/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 273/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00273: ReduceLROnPlateau reducing learning rate to 7.450580430390374e-11. 
Epoch 274/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 275/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 276/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 277/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 278/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 279/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 280/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 281/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 282/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 283/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00283: ReduceLROnPlateau reducing learning rate to 3.725290215195187e-11. 
Epoch 284/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 285/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 286/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 287/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 288/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 289/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 290/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 291/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 292/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 293/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00293: ReduceLROnPlateau reducing learning rate to 1.8626451075975936e-11. 
Epoch 294/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 295/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 296/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 297/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 298/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 299/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 300/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 301/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 302/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 303/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00303: ReduceLROnPlateau reducing learning rate to 9.313225537987968e-12. 
Epoch 304/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 305/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 306/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 307/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 308/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 309/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 310/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 311/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 312/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 313/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00313: ReduceLROnPlateau reducing learning rate to 4.656612768993984e-12. 
Epoch 314/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 315/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 316/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 317/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 318/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 319/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 320/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 321/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 322/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 323/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00323: ReduceLROnPlateau reducing learning rate to 2.328306384496992e-12. 
Epoch 324/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 325/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 326/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 327/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 328/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 329/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 330/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 331/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 332/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 333/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00333: ReduceLROnPlateau reducing learning rate to 1.164153192248496e-12. 
Epoch 334/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 335/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 336/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 337/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 338/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 339/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 340/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 341/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 342/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 343/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00343: ReduceLROnPlateau reducing learning rate to 5.82076596124248e-13. 
Epoch 344/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 345/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 346/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 347/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 348/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 349/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 350/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 351/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 352/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 353/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00353: ReduceLROnPlateau reducing learning rate to 2.91038298062124e-13. 
Epoch 354/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 355/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 356/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 357/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 358/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 359/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 360/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 361/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 362/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 363/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00363: ReduceLROnPlateau reducing learning rate to 1.45519149031062e-13. 
Epoch 364/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 365/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 366/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 367/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 368/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 369/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 370/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 371/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 372/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 373/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00373: ReduceLROnPlateau reducing learning rate to 7.2759574515531e-14. 
Epoch 374/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 375/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 376/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 377/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 378/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 379/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 380/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 381/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 382/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 383/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00383: ReduceLROnPlateau reducing learning rate to 3.63797872577655e-14. 
Epoch 384/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 385/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 386/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 387/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 388/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 389/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 390/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 391/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 392/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 393/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00393: ReduceLROnPlateau reducing learning rate to 1.818989362888275e-14. 
Epoch 394/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 395/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 396/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 397/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 398/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 399/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 400/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 401/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 402/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 403/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00403: ReduceLROnPlateau reducing learning rate to 9.094946814441375e-15. 
Epoch 404/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 405/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 406/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 407/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 408/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 409/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 410/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 411/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 412/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 413/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00413: ReduceLROnPlateau reducing learning rate to 4.5474734072206875e-15. 
Epoch 414/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 415/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 416/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 417/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 418/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 419/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 420/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 421/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 422/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 423/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00423: ReduceLROnPlateau reducing learning rate to 2.2737367036103438e-15. 
Epoch 424/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 425/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 426/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 427/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 428/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 429/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 430/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 431/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 432/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 433/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00433: ReduceLROnPlateau reducing learning rate to 1.1368683518051719e-15. 
Epoch 434/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 435/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 436/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 437/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 438/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 439/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 440/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 441/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 442/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 443/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00443: ReduceLROnPlateau reducing learning rate to 5.684341759025859e-16. 
Epoch 444/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 445/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 446/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 447/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 448/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 449/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 450/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 451/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 452/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 453/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00453: ReduceLROnPlateau reducing learning rate to 2.8421708795129297e-16. 
Epoch 454/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 455/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 456/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 457/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 458/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 459/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 460/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 461/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 462/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 463/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00463: ReduceLROnPlateau reducing learning rate to 1.4210854397564648e-16. 
Epoch 464/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 465/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 466/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 467/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 468/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 469/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 470/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 471/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 472/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 473/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00473: ReduceLROnPlateau reducing learning rate to 7.105427198782324e-17. 
Epoch 474/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 475/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 476/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 477/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 478/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 479/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 480/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 481/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 482/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 483/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00483: ReduceLROnPlateau reducing learning rate to 3.552713599391162e-17. 
Epoch 484/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 485/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 486/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 487/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 488/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 489/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 490/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 491/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 492/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 493/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00493: ReduceLROnPlateau reducing learning rate to 1.776356799695581e-17. 
Epoch 494/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 495/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 496/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 497/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 498/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 499/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 500/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 501/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 502/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 503/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00503: ReduceLROnPlateau reducing learning rate to 8.881783998477905e-18. 
Epoch 504/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 505/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 506/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 507/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 508/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 509/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 510/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 511/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 512/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 513/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00513: ReduceLROnPlateau reducing learning rate to 4.440891999238953e-18. 
Epoch 514/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 515/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 516/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 517/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 518/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 519/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 520/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 521/2000 191/191 [==============================] - 0s 141us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 522/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 523/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00523: ReduceLROnPlateau reducing learning rate to 2.2204459996194763e-18. 
Epoch 524/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 525/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 526/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 527/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 528/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 529/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 530/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 531/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 532/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 533/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00533: ReduceLROnPlateau reducing learning rate to 1.1102229998097382e-18. 
Epoch 534/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 535/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 536/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 537/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 538/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 539/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 540/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 541/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 542/2000 191/191 [==============================] - ETA: 0s - loss: 0.0396 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 543/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00543: ReduceLROnPlateau reducing learning rate to 5.551114999048691e-19. 
Epoch 544/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 545/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 546/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 547/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 548/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 549/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 550/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 551/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 552/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 553/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00553: ReduceLROnPlateau reducing learning rate to 2.7755574995243454e-19. 
Epoch 554/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 555/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 556/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 557/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 558/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 559/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 560/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 561/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 562/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 563/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00563: ReduceLROnPlateau reducing learning rate to 1.3877787497621727e-19. 
Epoch 564/2000 191/191 [==============================] - 0s 303us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 565/2000 191/191 [==============================] - 0s 288us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 566/2000 191/191 [==============================] - 0s 194us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 567/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 568/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 569/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 570/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 571/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 572/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 573/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00573: ReduceLROnPlateau reducing learning rate to 6.938893748810864e-20. 
Epoch 574/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 575/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 576/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 577/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 578/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 579/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 580/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 581/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 582/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 583/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00583: ReduceLROnPlateau reducing learning rate to 3.469446874405432e-20. 
Epoch 584/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 585/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 586/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 587/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 588/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 589/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 590/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 591/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 592/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 593/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00593: ReduceLROnPlateau reducing learning rate to 1.734723437202716e-20. 
Epoch 594/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 595/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 596/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 597/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 598/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 599/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 600/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 601/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 602/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 603/2000 191/191 [==============================] - 0s 173us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00603: ReduceLROnPlateau reducing learning rate to 8.67361718601358e-21. 
Epoch 604/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 605/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 606/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 607/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 608/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 609/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 610/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 611/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 612/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 613/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00613: ReduceLROnPlateau reducing learning rate to 4.33680859300679e-21. 
Epoch 614/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 615/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 616/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 617/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 618/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 619/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 620/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 621/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 622/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 623/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00623: ReduceLROnPlateau reducing learning rate to 2.168404296503395e-21. 
Epoch 624/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 625/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 626/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 627/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 628/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 629/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 630/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 631/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 632/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 633/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00633: ReduceLROnPlateau reducing learning rate to 1.0842021482516974e-21. 
Epoch 634/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 635/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 636/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 637/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 638/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 639/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 640/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 641/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 642/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 643/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00643: ReduceLROnPlateau reducing learning rate to 5.421010741258487e-22. 
Epoch 644/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 645/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 646/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 647/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 648/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 649/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 650/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 651/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 652/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 653/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00653: ReduceLROnPlateau reducing learning rate to 2.7105053706292436e-22. 
Epoch 654/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 655/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 656/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 657/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 658/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 659/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 660/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 661/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 662/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 663/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00663: ReduceLROnPlateau reducing learning rate to 1.3552526853146218e-22. 
Epoch 664/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 665/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 666/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 667/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 668/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 669/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 670/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 671/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 672/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 673/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00673: ReduceLROnPlateau reducing learning rate to 6.776263426573109e-23. 
Epoch 674/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 675/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 676/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 677/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 678/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 679/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 680/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 681/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 682/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 683/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00683: ReduceLROnPlateau reducing learning rate to 3.3881317132865545e-23. 
Epoch 684/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 685/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 686/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 687/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 688/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 689/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 690/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 691/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 692/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 693/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00693: ReduceLROnPlateau reducing learning rate to 1.6940658566432772e-23. 
Epoch 694/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 695/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 696/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 697/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 698/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 699/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 700/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 701/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 702/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 703/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00703: ReduceLROnPlateau reducing learning rate to 8.470329283216386e-24. 
Epoch 704/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 705/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 706/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 707/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 708/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 709/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 710/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 711/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 712/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 713/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00713: ReduceLROnPlateau reducing learning rate to 4.235164641608193e-24. 
Epoch 714/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 715/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 716/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 717/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 718/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 719/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 720/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 721/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 722/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 723/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00723: ReduceLROnPlateau reducing learning rate to 2.1175823208040965e-24. 
Epoch 724/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 725/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 726/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 727/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 728/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 729/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 730/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 731/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 732/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 733/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00733: ReduceLROnPlateau reducing learning rate to 1.0587911604020483e-24. 
Epoch 734/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 735/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 736/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 737/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 738/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 739/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 740/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 741/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 742/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 743/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00743: ReduceLROnPlateau reducing learning rate to 5.293955802010241e-25. 
Epoch 744/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 745/2000 191/191 [==============================] - ETA: 0s - loss: 0.0317 - accuracy: 1.00 - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 746/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 747/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 748/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 749/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 750/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 751/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 752/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 753/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00753: ReduceLROnPlateau reducing learning rate to 2.6469779010051207e-25. 
Epoch 754/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 755/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 756/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 757/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 758/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 759/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 760/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 761/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 762/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 763/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00763: ReduceLROnPlateau reducing learning rate to 1.3234889505025603e-25. 
Epoch 764/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 765/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 766/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 767/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 768/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 769/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 770/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 771/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 772/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 773/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00773: ReduceLROnPlateau reducing learning rate to 6.617444752512802e-26. 
Epoch 774/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 775/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 776/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 777/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 778/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 779/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 780/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 781/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 782/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 783/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00783: ReduceLROnPlateau reducing learning rate to 3.308722376256401e-26. 
Epoch 784/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 785/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 786/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 787/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 788/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 789/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 790/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 791/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 792/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 793/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00793: ReduceLROnPlateau reducing learning rate to 1.6543611881282004e-26. 
Epoch 794/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 795/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 796/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 797/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 798/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 799/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 800/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 801/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 802/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 803/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00803: ReduceLROnPlateau reducing learning rate to 8.271805940641002e-27. 
Epoch 804/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 805/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 806/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 807/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 808/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 809/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 810/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 811/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 812/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 813/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00813: ReduceLROnPlateau reducing learning rate to 4.135902970320501e-27. 
Epoch 814/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 815/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 816/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 817/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 818/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 819/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 820/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 821/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 822/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 823/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00823: ReduceLROnPlateau reducing learning rate to 2.0679514851602505e-27. 
Epoch 824/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 825/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 826/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 827/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 828/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 829/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 830/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 831/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 832/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 833/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00833: ReduceLROnPlateau reducing learning rate to 1.0339757425801253e-27. 
Epoch 834/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 835/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 836/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 837/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 838/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 839/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 840/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 841/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 842/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 843/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00843: ReduceLROnPlateau reducing learning rate to 5.169878712900626e-28. 
Epoch 844/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 845/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 846/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 847/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 848/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 849/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 850/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 851/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 852/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 853/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00853: ReduceLROnPlateau reducing learning rate to 2.584939356450313e-28. 
Epoch 854/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 855/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 856/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 857/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 858/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 859/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 860/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 861/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 862/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 863/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00863: ReduceLROnPlateau reducing learning rate to 1.2924696782251566e-28. 
Epoch 864/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 865/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 866/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 867/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 868/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 869/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 870/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 871/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 872/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 873/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00873: ReduceLROnPlateau reducing learning rate to 6.462348391125783e-29. 
Epoch 874/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 875/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 876/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 877/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 878/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 879/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 880/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 881/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 882/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 883/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00883: ReduceLROnPlateau reducing learning rate to 3.2311741955628914e-29. 
Epoch 884/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 885/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 886/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 887/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 888/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 889/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 890/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 891/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 892/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 893/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00893: ReduceLROnPlateau reducing learning rate to 1.6155870977814457e-29. 
Epoch 894/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 895/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 896/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 897/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 898/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 899/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 900/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 901/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 902/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 903/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00903: ReduceLROnPlateau reducing learning rate to 8.077935488907229e-30. 
Epoch 904/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 905/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 906/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 907/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 908/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 909/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 910/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 911/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 912/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 913/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00913: ReduceLROnPlateau reducing learning rate to 4.038967744453614e-30. 
Epoch 914/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 915/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 916/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 917/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 918/2000 191/191 [==============================] - ETA: 0s - loss: 0.0296 - accuracy: 1.00 - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 919/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 920/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 921/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 922/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 923/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00923: ReduceLROnPlateau reducing learning rate to 2.019483872226807e-30. 
Epoch 924/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 925/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 926/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 927/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 928/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 929/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 930/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 931/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 932/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 933/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00933: ReduceLROnPlateau reducing learning rate to 1.0097419361134036e-30. 
Epoch 934/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 935/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 936/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 937/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 938/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 939/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 940/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 941/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 942/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 943/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00943: ReduceLROnPlateau reducing learning rate to 5.048709680567018e-31. 
Epoch 944/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 945/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 946/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 947/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 948/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 949/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 950/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 951/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 952/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 953/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00953: ReduceLROnPlateau reducing learning rate to 2.524354840283509e-31. 
Epoch 954/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 955/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 956/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 957/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 958/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 959/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 960/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 961/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 962/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 963/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00963: ReduceLROnPlateau reducing learning rate to 1.2621774201417545e-31. 
Epoch 964/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 965/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 966/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 967/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 968/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 969/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 970/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 971/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 972/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 973/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00973: ReduceLROnPlateau reducing learning rate to 6.310887100708772e-32. 
Epoch 974/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 975/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 976/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 977/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 978/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 979/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 980/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 981/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 982/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 983/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00983: ReduceLROnPlateau reducing learning rate to 3.155443550354386e-32. 
Epoch 984/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 985/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 986/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 987/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 988/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 989/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 990/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 991/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 992/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 993/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00993: ReduceLROnPlateau reducing learning rate to 1.577721775177193e-32. 
Epoch 994/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 995/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 996/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 997/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 998/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 999/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1000/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1001/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1002/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1003/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01003: ReduceLROnPlateau reducing learning rate to 7.888608875885965e-33. 
Epoch 1004/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1005/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1006/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1007/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1008/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1009/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1010/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1011/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1012/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1013/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01013: ReduceLROnPlateau reducing learning rate to 3.944304437942983e-33. 
Epoch 1014/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1015/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1016/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1017/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1018/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1019/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1020/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1021/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1022/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1023/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01023: ReduceLROnPlateau reducing learning rate to 1.9721522189714914e-33. 
Epoch 1024/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1025/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1026/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1027/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1028/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1029/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1030/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1031/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1032/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1033/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01033: ReduceLROnPlateau reducing learning rate to 9.860761094857457e-34. 
Epoch 1034/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1035/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1036/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1037/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1038/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1039/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1040/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1041/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1042/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1043/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01043: ReduceLROnPlateau reducing learning rate to 4.930380547428728e-34. 
Epoch 1044/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1045/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1046/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1047/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1048/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1049/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1050/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1051/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1052/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1053/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01053: ReduceLROnPlateau reducing learning rate to 2.465190273714364e-34. 
Epoch 1054/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1055/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1056/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1057/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1058/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1059/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1060/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1061/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1062/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1063/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01063: ReduceLROnPlateau reducing learning rate to 1.232595136857182e-34. 
Epoch 1064/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1065/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1066/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1067/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1068/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1069/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1070/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1071/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1072/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1073/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01073: ReduceLROnPlateau reducing learning rate to 6.16297568428591e-35. 
Epoch 1074/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1075/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1076/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1077/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1078/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1079/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1080/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1081/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1082/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1083/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01083: ReduceLROnPlateau reducing learning rate to 3.081487842142955e-35. 
Epoch 1084/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1085/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1086/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1087/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1088/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1089/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1090/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1091/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1092/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1093/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01093: ReduceLROnPlateau reducing learning rate to 1.5407439210714776e-35. 
Epoch 1094/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1095/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1096/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1097/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1098/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1099/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1100/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1101/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1102/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1103/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01103: ReduceLROnPlateau reducing learning rate to 7.703719605357388e-36. 
Epoch 1104/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1105/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1106/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1107/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1108/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1109/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1110/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1111/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1112/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1113/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01113: ReduceLROnPlateau reducing learning rate to 3.851859802678694e-36. 
Epoch 1114/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1115/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1116/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1117/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1118/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1119/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1120/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1121/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1122/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1123/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01123: ReduceLROnPlateau reducing learning rate to 1.925929901339347e-36. 
Epoch 1124/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1125/2000 191/191 [==============================] - ETA: 0s - loss: 0.0433 - accuracy: 1.00 - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1126/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1127/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1128/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1129/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1130/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1131/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1132/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1133/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01133: ReduceLROnPlateau reducing learning rate to 9.629649506696735e-37. 
Epoch 1134/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1135/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1136/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1137/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1138/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1139/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1140/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1141/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1142/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1143/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01143: ReduceLROnPlateau reducing learning rate to 4.8148247533483676e-37. 
Epoch 1144/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1145/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1146/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1147/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1148/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1149/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1150/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1151/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1152/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1153/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01153: ReduceLROnPlateau reducing learning rate to 2.4074123766741838e-37. 
Epoch 1154/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1155/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1156/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1157/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1158/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1159/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1160/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1161/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1162/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1163/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01163: ReduceLROnPlateau reducing learning rate to 1.2037061883370919e-37. 
Epoch 1164/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1165/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1166/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1167/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1168/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1169/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1170/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1171/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1172/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1173/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01173: ReduceLROnPlateau reducing learning rate to 6.018530941685459e-38. 
Epoch 1174/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1175/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1176/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1177/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1178/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1179/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1180/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1181/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1182/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1183/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01183: ReduceLROnPlateau reducing learning rate to 3.0092654708427297e-38. 
Epoch 1184/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1185/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1186/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1187/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1188/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1189/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1190/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1191/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1192/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1193/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01193: ReduceLROnPlateau reducing learning rate to 1.5046327354213649e-38. 
Epoch 1194/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1195/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1196/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1197/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1198/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1199/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1200/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1201/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1202/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1203/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01203: ReduceLROnPlateau reducing learning rate to 7.523163677106824e-39. 
Epoch 1204/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1205/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1206/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1207/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1208/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1209/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1210/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1211/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1212/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1213/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01213: ReduceLROnPlateau reducing learning rate to 3.761581838553412e-39. 
Epoch 1214/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1215/2000 191/191 [==============================] - ETA: 0s - loss: 0.0311 - accuracy: 1.00 - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1216/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1217/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1218/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1219/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1220/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1221/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1222/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1223/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01223: ReduceLROnPlateau reducing learning rate to 1.88079056895209e-39. 
Epoch 1224/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1225/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1226/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1227/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1228/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1229/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1230/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1231/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1232/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1233/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01233: ReduceLROnPlateau reducing learning rate to 9.40395284476045e-40. 
Epoch 1234/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1235/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1236/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1237/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1238/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1239/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1240/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1241/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1242/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1243/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01243: ReduceLROnPlateau reducing learning rate to 4.701972919134064e-40. 
Epoch 1244/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1245/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1246/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1247/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1248/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1249/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1250/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1251/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1252/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1253/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01253: ReduceLROnPlateau reducing learning rate to 2.350986459567032e-40. 
Epoch 1254/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1255/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1256/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1257/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1258/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1259/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1260/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1261/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1262/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1263/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01263: ReduceLROnPlateau reducing learning rate to 1.175493229783516e-40. 
Epoch 1264/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1265/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1266/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1267/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1268/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1269/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1270/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1271/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1272/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1273/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01273: ReduceLROnPlateau reducing learning rate to 5.87746614891758e-41. 
Epoch 1274/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1275/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1276/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1277/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1278/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1279/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1280/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1281/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1282/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1283/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01283: ReduceLROnPlateau reducing learning rate to 2.93873307445879e-41. 
Epoch 1284/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1285/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1286/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1287/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1288/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1289/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1290/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1291/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1292/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1293/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01293: ReduceLROnPlateau reducing learning rate to 1.4694015696910032e-41. 
Epoch 1294/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1295/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1296/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1297/2000 191/191 [==============================] - ETA: 0s - loss: 0.0313 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1298/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1299/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1300/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1301/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1302/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1303/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01303: ReduceLROnPlateau reducing learning rate to 7.347007848455016e-42. 
Epoch 1304/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1305/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1306/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1307/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1308/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1309/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1310/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1311/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1312/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1313/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01313: ReduceLROnPlateau reducing learning rate to 3.673503924227508e-42. 
Epoch 1314/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1315/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1316/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1317/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1318/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1319/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1320/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1321/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1322/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1323/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01323: ReduceLROnPlateau reducing learning rate to 1.8371022867298352e-42. 
Epoch 1324/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1325/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1326/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1327/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1328/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1329/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1330/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1331/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1332/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1333/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01333: ReduceLROnPlateau reducing learning rate to 9.185511433649176e-43. 
Epoch 1334/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1335/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1336/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1337/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1338/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1339/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1340/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1341/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1342/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1343/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01343: ReduceLROnPlateau reducing learning rate to 4.5962589629854e-43. 
Epoch 1344/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1345/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1346/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1347/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1348/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1349/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1350/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1351/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1352/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1353/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01353: ReduceLROnPlateau reducing learning rate to 2.2981294814927e-43. 
Epoch 1354/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1355/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1356/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1357/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1358/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1359/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1360/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1361/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1362/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1363/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01363: ReduceLROnPlateau reducing learning rate to 1.14906474074635e-43. 
Epoch 1364/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1365/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1366/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1367/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1368/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1369/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1370/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1371/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1372/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1373/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01373: ReduceLROnPlateau reducing learning rate to 5.74532370373175e-44. 
Epoch 1374/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1375/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1376/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1377/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1378/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1379/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1380/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1381/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1382/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1383/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01383: ReduceLROnPlateau reducing learning rate to 2.872661851865875e-44. 
Epoch 1384/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1385/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1386/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1387/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1388/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1389/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1390/2000 191/191 [==============================] - ETA: 0s - loss: 0.0479 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1391/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1392/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1393/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01393: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-44. 
Epoch 1394/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1395/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1396/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1397/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1398/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1399/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1400/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1401/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1402/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1403/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01403: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-45. 
Epoch 1404/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1405/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1406/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1407/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1408/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1409/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1410/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1411/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1412/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1413/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01413: ReduceLROnPlateau reducing learning rate to 3.5032461608120427e-45. 
Epoch 1414/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1415/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1416/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1417/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1418/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1419/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1420/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1421/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1422/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1423/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01423: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-45. 
Epoch 1424/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1425/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1426/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1427/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1428/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1429/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1430/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1431/2000 191/191 [==============================] - ETA: 0s - loss: 0.0420 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1432/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1433/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01433: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-46. 
Epoch 1434/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1435/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1436/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1437/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1438/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1439/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1440/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1441/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1442/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1443/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1444/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1445/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1446/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1447/2000 191/191 [==============================] - 0s 
115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1448/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1449/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1450/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1451/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1452/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1453/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1454/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1455/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1456/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1457/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1458/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1459/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1460/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 
- val_accuracy: 0.7656 Epoch 1461/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1462/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1463/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1464/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1465/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1466/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1467/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1468/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1469/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1470/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1471/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1472/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1473/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1474/2000 191/191 
[==============================] - ETA: 0s - loss: 0.0491 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1475/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1476/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1477/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1478/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1479/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1480/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1481/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1482/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1483/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1484/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1485/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1486/2000 191/191 [==============================] - 0s 147us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1487/2000 191/191 
[==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1488/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1489/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1490/2000 191/191 [==============================] - 0s 114us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1491/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1492/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1493/2000 191/191 [==============================] - 0s 109us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1494/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1495/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1496/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1497/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1498/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1499/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1500/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1501/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1502/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1503/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1504/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1505/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1506/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1507/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1508/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1509/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1510/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1511/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1512/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1513/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1514/2000 
191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1515/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1516/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1517/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1518/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1519/2000 191/191 [==============================] - 0s 95us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1520/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1521/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1522/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1523/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1524/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1525/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1526/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1527/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 
- accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1528/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1529/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1530/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1531/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1532/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1533/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1534/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1535/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1536/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1537/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1538/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1539/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1540/2000 191/191 [==============================] - ETA: 0s - loss: 0.0384 - accuracy: 1.00 - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - 
val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1541/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1542/2000 191/191 [==============================] - 0s 188us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1543/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1544/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1545/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1546/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1547/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1548/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1549/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1550/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1551/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1552/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1553/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1554/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1555/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1556/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1557/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1558/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1559/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1560/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1561/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1562/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1563/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1564/2000 191/191 [==============================] - ETA: 0s - loss: 0.0228 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1565/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1566/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1567/2000 191/191 [==============================] 
- 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1568/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1569/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1570/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1571/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1572/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1573/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1574/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1575/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1576/2000 191/191 [==============================] - 0s 111us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1577/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1578/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1579/2000 191/191 [==============================] - 0s 102us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1580/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 
- val_accuracy: 0.7656 Epoch 1581/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1582/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1583/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1584/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1585/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1586/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1587/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1588/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1589/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1590/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1591/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1592/2000 191/191 [==============================] - 0s 101us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1593/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1594/2000 191/191 
[==============================] - 0s 112us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1595/2000 191/191 [==============================] - 0s 106us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1596/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1597/2000 191/191 [==============================] - ETA: 0s - loss: 0.0252 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1598/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1599/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1600/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1601/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1602/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1603/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1604/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1605/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1606/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1607/2000 191/191 
[==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1608/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1609/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1610/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1611/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1612/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1613/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1614/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1615/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1616/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1617/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1618/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1619/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1620/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1621/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1622/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1623/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1624/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1625/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1626/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1627/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1628/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1629/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1630/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1631/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1632/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1633/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 
1634/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1635/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1636/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1637/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1638/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1639/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1640/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1641/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1642/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1643/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1644/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1645/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1646/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1647/2000 191/191 [==============================] - 0s 99us/step - 
loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1648/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1649/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1650/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1651/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1652/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1653/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1654/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1655/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1656/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1657/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1658/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1659/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1660/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 
0.7656 Epoch 1661/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1662/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1663/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1664/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1665/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1666/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1667/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1668/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1669/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1670/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1671/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1672/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1673/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1674/2000 191/191 [==============================] - 0s 
94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1675/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1676/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1677/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1678/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1679/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1680/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1681/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1682/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1683/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1684/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1685/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1686/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1687/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - 
val_accuracy: 0.7656 Epoch 1688/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1689/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1690/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1691/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1692/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1693/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1694/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1695/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1696/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1697/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1698/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1699/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1700/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1701/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1702/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1703/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1704/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1705/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1706/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1707/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1708/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1709/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1710/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1711/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1712/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1713/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1714/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1715/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1716/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1717/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1718/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1719/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1720/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1721/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1722/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1723/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1724/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1725/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1726/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1727/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1728/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1729/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1730/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1731/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1732/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1733/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1734/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1735/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1736/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1737/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1738/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1739/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1740/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1741/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1742/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1743/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1744/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1745/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1746/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1747/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1748/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1749/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1750/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1751/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1752/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1753/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1754/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1755/2000 191/191 
[==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1756/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1757/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1758/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1759/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1760/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1761/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1762/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1763/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1764/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1765/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1766/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1767/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1768/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1769/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1770/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1771/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1772/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1773/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1774/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1775/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1776/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1777/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1778/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1779/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1780/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1781/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1782/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1783/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1784/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1785/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1786/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1787/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1788/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1789/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1790/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1791/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1792/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1793/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1794/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1795/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1796/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1797/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1798/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1799/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1800/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1801/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1802/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1803/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1804/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1805/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1806/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1807/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1808/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1809/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1810/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1811/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1812/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1813/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1814/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1815/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1816/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1817/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1818/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1819/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1820/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1821/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1822/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1823/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1824/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1825/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1826/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1827/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1828/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1829/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1830/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1831/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1832/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1833/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1834/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1835/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1836/2000 191/191 
[==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1837/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1838/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1839/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1840/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1841/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1842/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1843/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1844/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1845/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1846/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1847/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1848/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1849/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1850/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1851/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1852/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1853/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1854/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1855/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1856/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1857/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1858/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1859/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1860/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1861/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1862/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1863/2000 191/191 
[==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1864/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1865/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1866/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1867/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1868/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1869/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1870/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1871/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1872/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1873/2000 191/191 [==============================] - ETA: 0s - loss: 0.0489 - accuracy: 1.00 - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1874/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1875/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1876/2000 191/191 [==============================] 
- 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1877/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1878/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1879/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1880/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1881/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1882/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1883/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1884/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1885/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1886/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1887/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1888/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1889/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - 
val_accuracy: 0.7656 Epoch 1890/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1891/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1892/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1893/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1894/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1895/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1896/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1897/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1898/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1899/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1900/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1901/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1902/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1903/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1904/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1905/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1906/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1907/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1908/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1909/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1910/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1911/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1912/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1913/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1914/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1915/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1916/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1917/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1918/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1919/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1920/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1921/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1922/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1923/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1924/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1925/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1926/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1927/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1928/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1929/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1930/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1931/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1932/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1933/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1934/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1935/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1936/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1937/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1938/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1939/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1940/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1941/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1942/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1943/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1944/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1945/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1946/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1947/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1948/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1949/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1950/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1951/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1952/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1953/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1954/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1955/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1956/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1957/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1958/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1959/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1960/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1961/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1962/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1963/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1964/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1965/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1966/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1967/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1968/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1969/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1970/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1971/2000 191/191 [==============================] - ETA: 0s - loss: 0.0400 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1972/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1973/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1974/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1975/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1976/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1977/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1978/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1979/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1980/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1981/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1982/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1983/2000 191/191 [==============================] - ETA: 0s - loss: 0.0319 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1984/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1985/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1986/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1987/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1988/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1989/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1990/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1991/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1992/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1993/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1994/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1995/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1996/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1997/2000 
191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1998/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1999/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 2000/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656
# Plot the training curves recorded by Keras.
# `history.history` maps each metric name to one value per epoch.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))  # x-axis: 0 .. n_epochs-1

# Accuracy: training as blue dots, validation as a solid blue line.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.legend()
plt.show()

# Loss curves on a fresh figure (plt.show() above closed the previous one).
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
range(0, 2000)
# Final evaluation of the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
64/64 [==============================] - 0s 47us/step test loss: 0.8193266093730927, test accuracy: 0.765625
# Raw probability scores on the test set; ROC AUC is computed on the
# scores themselves, before any thresholding.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.7282608695652174
# Binarise the probability scores at 0.5, then measure the
# chance-corrected agreement with the true labels.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.366754617414248
# Display the MFCC feature matrix (255 rows x 13 columns, per the output below).
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.339415 | 0.847773 | 0.497198 | -0.389310 | 1.225458 | 1.947033 | -0.736267 | 0.492219 | 0.576682 | 1.504697 | -1.796460 | 0.724954 | 0.958600 |
| 1 | 0.587658 | -1.195426 | 0.636375 | 0.199876 | 0.765321 | 0.061181 | 0.379367 | -0.440867 | 0.232893 | 1.339920 | 0.110001 | 0.807525 | 0.815678 |
| 2 | 1.465595 | -2.307943 | 0.354567 | -0.058273 | -1.298853 | -0.811453 | -1.551580 | -3.934320 | -1.079432 | 2.546130 | 1.421407 | 0.639359 | 0.199094 |
| 3 | 0.749403 | -1.690498 | -0.125200 | -1.016135 | 0.825845 | 0.271444 | -0.104786 | -0.992141 | 0.049182 | 1.425948 | -0.343269 | -0.789558 | -0.411898 |
| 4 | -0.280577 | 0.393332 | 0.744917 | 2.411400 | -0.777421 | -0.420018 | 1.258355 | -1.544565 | -0.498071 | 0.421527 | -0.632908 | -0.056846 | -0.072348 |
| 5 | -0.158690 | 0.404891 | -0.147920 | -0.299241 | -0.786974 | 0.697216 | 0.290501 | 0.019739 | -1.468086 | -0.346174 | -0.086965 | 0.026492 | 1.019512 |
| 6 | 1.646777 | 0.772744 | -1.425228 | -0.562610 | -1.556076 | 0.533289 | -0.404271 | 1.676958 | 0.979516 | 0.415548 | 0.544719 | 0.433332 | 0.204271 |
| 7 | 1.124970 | 0.506236 | 0.738993 | 1.984485 | -0.928706 | -0.494097 | -0.707105 | -0.494778 | -1.642929 | 0.207467 | 0.181382 | 2.431721 | 0.848697 |
| 8 | 0.920059 | 1.438862 | -2.048354 | 1.503567 | -2.801303 | 0.567132 | -0.745441 | 0.569519 | 0.130917 | 1.965436 | -0.034797 | 1.164878 | 0.074074 |
| 9 | 0.182544 | 0.310622 | 0.067722 | 0.870138 | 0.168366 | 0.682045 | -0.191296 | -0.144962 | -0.630020 | -0.284032 | -0.315301 | 0.344841 | 0.495167 |
| 10 | 0.168663 | 0.389450 | 0.034360 | 1.213392 | 0.248437 | 0.870618 | -0.460824 | -0.174734 | -0.710502 | -0.228408 | -0.265153 | 0.349416 | 0.584114 |
| 11 | 0.153010 | -0.118336 | 0.639531 | 1.504522 | 0.937909 | 0.356048 | -0.089987 | -0.628522 | 0.064203 | 0.966049 | 0.403915 | -0.943626 | 0.173874 |
| 12 | 0.132578 | 0.261966 | -2.871493 | -3.398160 | -0.256458 | 1.596532 | -0.358711 | 0.175955 | -0.499075 | 0.949085 | 2.235525 | -0.197712 | -0.272366 |
| 13 | 1.094629 | 0.885150 | -1.130672 | -0.083270 | 0.672482 | 0.750453 | -0.863949 | 0.140540 | 0.423312 | -0.305155 | -0.424905 | 0.318660 | 0.885900 |
| 14 | 0.771472 | 0.364448 | -0.454696 | 0.434253 | 0.912699 | 0.745924 | -0.073390 | -0.406473 | 0.450765 | 0.323180 | -0.458826 | -0.132295 | 0.495454 |
| 15 | 0.677561 | 0.166795 | 0.746471 | 0.075191 | 0.867924 | -1.621678 | 0.771146 | -0.067286 | 0.557998 | -0.093593 | 0.020233 | -0.800013 | -0.629188 |
| 16 | -0.032353 | 1.227345 | -0.188580 | 0.927210 | 0.016663 | 1.001867 | -0.473811 | 0.782387 | 1.542760 | -0.345478 | -0.838104 | -0.439443 | 1.179204 |
| 17 | 0.459031 | 1.258961 | -0.329412 | 1.391790 | -0.208888 | 1.059241 | -1.245671 | 0.619153 | 0.245780 | 0.644548 | -0.602629 | -0.928581 | 0.739885 |
| 18 | -0.359172 | 0.051214 | -0.603962 | 0.778896 | 1.630471 | 1.802477 | 1.486205 | -0.140738 | -0.894366 | 0.736624 | 2.114721 | 1.078175 | -0.965785 |
| 19 | 0.209859 | -0.615399 | -0.676895 | 0.735655 | 0.805509 | -0.696793 | 1.073068 | 0.240429 | -0.205934 | -0.759693 | 0.672843 | 0.569482 | -0.455391 |
| 20 | 0.127381 | -0.265099 | -0.258801 | -0.127568 | 0.649447 | 0.244473 | 1.897421 | -0.344616 | -0.593159 | 0.065147 | 1.787607 | 1.219355 | -0.171813 |
| 21 | 1.222717 | 0.409860 | 1.311826 | 0.703873 | 0.322062 | 0.305461 | -0.522644 | -0.750833 | 0.001767 | 0.017953 | 0.254329 | -0.227762 | -0.614790 |
| 22 | 1.173352 | 0.490500 | 0.742825 | -0.028159 | -0.272396 | -0.502733 | -0.759443 | -1.031924 | -0.157975 | 0.075659 | 0.604220 | 0.143298 | -0.001849 |
| 23 | 1.069960 | 0.858822 | -0.795544 | 0.076688 | 0.851875 | 0.735014 | -0.758779 | 0.065595 | 0.532667 | -0.391858 | -0.497019 | 0.240822 | 0.848126 |
| 24 | 0.581377 | -0.804045 | 0.399887 | 1.535671 | 0.245878 | 0.904192 | -0.233991 | -0.925983 | 0.212280 | 0.499535 | -0.024926 | -0.925999 | 1.294925 |
| 25 | 0.161110 | 0.025075 | 0.716318 | 1.532230 | 0.889883 | 0.353167 | -0.058787 | -0.593046 | 0.093773 | 0.927085 | 0.199691 | -0.979872 | 0.232850 |
| 26 | 0.431443 | 0.442713 | 0.259120 | 0.045533 | 0.102675 | 0.367606 | 0.054320 | 0.942924 | 0.180609 | 0.550983 | 0.265291 | 0.321252 | -0.830969 |
| 27 | 0.344525 | -1.140315 | -0.725453 | -0.547965 | 0.449924 | 0.303904 | 1.053624 | 1.051712 | 0.509322 | 0.181611 | -0.519979 | -1.134490 | -1.439105 |
| 28 | -0.041565 | 0.671274 | 0.195143 | 0.247294 | 0.531620 | 1.050124 | 0.311358 | 0.988161 | -0.198869 | 0.387795 | 1.757366 | 1.351684 | 0.194840 |
| 29 | 0.417845 | -1.134173 | -0.760709 | -0.605264 | 0.077464 | 0.533333 | 1.104524 | 2.124971 | 0.083548 | 0.801730 | 0.092534 | -1.281628 | -1.468782 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 225 | 1.532114 | -1.060006 | -0.434145 | -0.999435 | -1.259462 | 0.039140 | -0.802013 | -0.655286 | 0.714448 | 1.005958 | -0.086372 | 0.537392 | 0.054440 |
| 226 | -0.942320 | 1.172080 | 0.506725 | -0.230675 | -0.104635 | 0.898742 | -1.107001 | -1.182148 | -0.940991 | 0.232366 | 1.778224 | 0.975251 | 1.731084 |
| 227 | 1.421974 | 0.631029 | -0.563813 | -0.694595 | -0.673270 | 0.929022 | 0.476907 | -1.025173 | -0.813644 | -0.060006 | -0.738730 | -0.558099 | 0.057654 |
| 228 | -1.473385 | -0.806223 | 1.849423 | -1.252541 | 0.941013 | -0.872947 | -1.812392 | -0.242718 | -0.097212 | -0.510500 | -0.232195 | -0.546399 | 0.945530 |
| 229 | -1.135926 | -0.772372 | 1.164844 | -1.022517 | 0.630202 | -0.496999 | -1.101656 | -0.168921 | -0.295159 | -0.587401 | 0.369033 | -0.266325 | 0.604469 |
| 230 | -1.085049 | 0.879566 | 0.442593 | 0.128917 | 0.393498 | 0.531555 | 0.392194 | 1.418515 | 0.891015 | -0.348926 | -0.756201 | -0.838584 | -0.015971 |
| 231 | -0.352258 | 0.556982 | 0.530520 | 0.443818 | 0.300921 | 0.032128 | -0.797384 | -0.573532 | 0.398084 | 0.328875 | -0.274964 | -1.300920 | 0.254456 |
| 232 | -1.190363 | 0.797356 | 0.758472 | 0.587917 | 0.890540 | 0.471925 | 0.105793 | 0.680721 | 0.230834 | -0.150709 | -0.816744 | -0.470618 | 0.371198 |
| 233 | -0.651003 | -0.586618 | 1.326854 | -0.451354 | 0.507113 | 0.165474 | -0.919675 | -0.448249 | -1.310940 | -1.372737 | 0.406029 | -1.414627 | -0.434858 |
| 234 | -1.459511 | -0.516281 | 1.631699 | -1.141842 | 0.584621 | -0.458541 | -1.428877 | -0.934556 | -0.216455 | -0.049794 | 0.095580 | 0.387068 | 0.693730 |
| 235 | -0.726984 | 0.702447 | 0.798069 | -0.320660 | 0.530902 | 1.019988 | 0.144995 | 0.207847 | 0.039592 | 0.220761 | 0.762941 | 0.575034 | 0.671517 |
| 236 | -0.300986 | -0.404923 | 0.715406 | 0.245380 | -0.427936 | -0.334843 | -0.228084 | -0.330898 | -0.674327 | 0.199560 | 0.827455 | 0.016433 | 0.866789 |
| 237 | -0.736244 | 0.088611 | 0.910051 | 0.437100 | 0.258256 | 0.363828 | -0.415290 | -0.717445 | -0.012727 | 0.436925 | -0.786954 | -1.217376 | 0.352825 |
| 238 | 0.610473 | -2.664315 | 1.303652 | -2.022376 | 1.500032 | -1.280926 | -1.249533 | 0.432111 | -0.768558 | 0.291156 | -0.092312 | 0.053770 | -0.401166 |
| 239 | -2.045424 | -2.954642 | 0.302601 | -0.868092 | -1.038134 | -1.230777 | 0.514329 | 0.057591 | -1.023895 | 0.275395 | -1.450282 | 0.386242 | 0.318763 |
| 240 | 0.329793 | -1.367570 | -1.454329 | -0.207924 | -0.723609 | -0.149025 | -0.085298 | -0.011595 | -0.240239 | -0.009120 | -0.325229 | -0.025722 | 0.114182 |
| 241 | -1.919591 | 1.382172 | -0.134161 | 0.837967 | -0.687780 | 0.944303 | -0.258652 | -0.742178 | 0.386031 | -1.178099 | -1.843543 | -0.710556 | -0.318561 |
| 242 | -2.087669 | 1.400006 | -0.494964 | 0.451717 | -0.759188 | 0.736625 | 0.133121 | -0.196031 | 1.121231 | 0.474128 | -0.345937 | -0.409324 | -0.442069 |
| 243 | -2.131652 | 0.439305 | -0.612226 | 0.854126 | -0.494550 | 0.825299 | 0.301373 | -0.018964 | 0.690556 | -0.078762 | -0.709495 | -0.075857 | -0.418656 |
| 244 | -1.611989 | -0.756403 | -0.410917 | 1.075909 | 0.297336 | -1.317576 | 1.115011 | -0.467065 | -0.768378 | 1.615499 | 1.611125 | -1.018782 | -1.798744 |
| 245 | -0.142010 | 0.000190 | -0.063461 | -0.506353 | -0.386942 | -0.256144 | 0.270621 | -1.497417 | 0.507892 | 0.456828 | -0.431169 | -0.978417 | 0.015849 |
| 246 | -1.263975 | -1.168117 | -1.396090 | -0.312016 | 1.862268 | 1.400290 | 0.646060 | -0.686864 | 0.418524 | -0.069926 | -0.653856 | -0.853617 | -0.106814 |
| 247 | -0.507700 | 0.899825 | 1.510153 | 1.083642 | 2.081451 | 0.589016 | 0.901321 | 0.658808 | 0.152596 | 0.176442 | -0.447633 | 0.287838 | 0.650479 |
| 248 | -0.159768 | 0.518093 | 2.197018 | 0.698491 | 0.476336 | -2.014255 | -1.614667 | -0.397282 | -1.781932 | -0.208894 | 1.650551 | -0.771436 | -0.987237 |
| 249 | -1.037899 | 1.016712 | 2.774230 | 0.665468 | -0.385673 | 0.587263 | -0.121609 | -0.331379 | 0.622484 | -0.387131 | -0.276584 | 0.218207 | 1.689216 |
| 250 | -0.526923 | -1.169944 | 0.474875 | -0.789231 | 0.369827 | -0.537003 | -1.089843 | -0.173366 | -0.023237 | -0.142334 | 0.740065 | 0.813114 | 0.872556 |
| 251 | -0.770856 | -1.024349 | -0.019140 | -0.097521 | 0.092703 | 0.369242 | -0.273901 | 0.190740 | -0.074032 | 0.113055 | 0.140291 | -0.696275 | 0.166679 |
| 252 | -0.905458 | -0.790575 | 0.206164 | -0.723816 | -0.444860 | 0.107833 | -0.734514 | -0.533865 | -0.634334 | 0.320526 | 0.088428 | -0.348210 | 0.347201 |
| 253 | -1.378235 | -0.338405 | 0.016815 | -0.394563 | 0.034043 | 1.023865 | -0.303960 | -1.316121 | 0.198697 | 0.670577 | 0.809574 | 0.580565 | 0.056004 |
| 254 | -0.199959 | -2.035812 | -0.904507 | -1.511975 | -0.437843 | 0.262972 | -1.943788 | -1.963300 | -2.256227 | 0.354369 | -0.039829 | 0.882325 | 0.139307 |
255 rows × 13 columns
# Elbow method: within-cluster sum of squares (KMeans inertia)
# for every candidate cluster count k = 1..14.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
[3315.0, 2972.7888695817974, 2748.18187155972, 2544.9420084212106, 2413.687059384553, 2278.037996783226, 2213.3487507256823, 2123.4282707474663, 2067.8299633414163, 1977.777252698108, 1956.5229777214513, 1880.0296166971755, 1815.5096049846275, 1785.9955747862728]
# Elbow plot: pick k where the WSS curve visibly bends.
plt.figure(figsize=(12, 12))
plt.plot(list(range(1, 15)), WSSs)
[<matplotlib.lines.Line2D at 0x1e82ae84f98>]
# Number of clusters chosen from the elbow plot above.
K = 6
# Fit K-means on the MFCC features; fixed random_state for reproducibility,
# n_init=10 restarts to avoid a poor local optimum.
# (Was hard-coded as n_clusters=6, leaving K dead; use the constant instead.)
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=6, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
# Cluster assignment (0..5, since n_clusters=6) for each training row.
kmeans_mfcc.labels_
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
# Cluster label per row of X.
# NOTE(review): predicting on the same data the model was fit on reproduces
# `kmeans_mfcc.labels_` (the two printed arrays are identical); predict()
# is only needed for new, unseen samples.
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
# Attach the cluster assignment and the target label to the feature table,
# then display the enlarged DataFrame.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.339415 | 0.847773 | 0.497198 | -0.389310 | 1.225458 | 1.947033 | -0.736267 | 0.492219 | 0.576682 | 1.504697 | -1.796460 | 0.724954 | 0.958600 | 4 | 0 |
| 1 | 0.587658 | -1.195426 | 0.636375 | 0.199876 | 0.765321 | 0.061181 | 0.379367 | -0.440867 | 0.232893 | 1.339920 | 0.110001 | 0.807525 | 0.815678 | 2 | 0 |
| 2 | 1.465595 | -2.307943 | 0.354567 | -0.058273 | -1.298853 | -0.811453 | -1.551580 | -3.934320 | -1.079432 | 2.546130 | 1.421407 | 0.639359 | 0.199094 | 2 | 0 |
| 3 | 0.749403 | -1.690498 | -0.125200 | -1.016135 | 0.825845 | 0.271444 | -0.104786 | -0.992141 | 0.049182 | 1.425948 | -0.343269 | -0.789558 | -0.411898 | 2 | 0 |
| 4 | -0.280577 | 0.393332 | 0.744917 | 2.411400 | -0.777421 | -0.420018 | 1.258355 | -1.544565 | -0.498071 | 0.421527 | -0.632908 | -0.056846 | -0.072348 | 0 | 0 |
| 5 | -0.158690 | 0.404891 | -0.147920 | -0.299241 | -0.786974 | 0.697216 | 0.290501 | 0.019739 | -1.468086 | -0.346174 | -0.086965 | 0.026492 | 1.019512 | 1 | 0 |
| 6 | 1.646777 | 0.772744 | -1.425228 | -0.562610 | -1.556076 | 0.533289 | -0.404271 | 1.676958 | 0.979516 | 0.415548 | 0.544719 | 0.433332 | 0.204271 | 1 | 0 |
| 7 | 1.124970 | 0.506236 | 0.738993 | 1.984485 | -0.928706 | -0.494097 | -0.707105 | -0.494778 | -1.642929 | 0.207467 | 0.181382 | 2.431721 | 0.848697 | 0 | 0 |
| 8 | 0.920059 | 1.438862 | -2.048354 | 1.503567 | -2.801303 | 0.567132 | -0.745441 | 0.569519 | 0.130917 | 1.965436 | -0.034797 | 1.164878 | 0.074074 | 1 | 0 |
| 9 | 0.182544 | 0.310622 | 0.067722 | 0.870138 | 0.168366 | 0.682045 | -0.191296 | -0.144962 | -0.630020 | -0.284032 | -0.315301 | 0.344841 | 0.495167 | 4 | 0 |
| 10 | 0.168663 | 0.389450 | 0.034360 | 1.213392 | 0.248437 | 0.870618 | -0.460824 | -0.174734 | -0.710502 | -0.228408 | -0.265153 | 0.349416 | 0.584114 | 4 | 0 |
| 11 | 0.153010 | -0.118336 | 0.639531 | 1.504522 | 0.937909 | 0.356048 | -0.089987 | -0.628522 | 0.064203 | 0.966049 | 0.403915 | -0.943626 | 0.173874 | 4 | 0 |
| 12 | 0.132578 | 0.261966 | -2.871493 | -3.398160 | -0.256458 | 1.596532 | -0.358711 | 0.175955 | -0.499075 | 0.949085 | 2.235525 | -0.197712 | -0.272366 | 1 | 0 |
| 13 | 1.094629 | 0.885150 | -1.130672 | -0.083270 | 0.672482 | 0.750453 | -0.863949 | 0.140540 | 0.423312 | -0.305155 | -0.424905 | 0.318660 | 0.885900 | 4 | 0 |
| 14 | 0.771472 | 0.364448 | -0.454696 | 0.434253 | 0.912699 | 0.745924 | -0.073390 | -0.406473 | 0.450765 | 0.323180 | -0.458826 | -0.132295 | 0.495454 | 4 | 0 |
| 15 | 0.677561 | 0.166795 | 0.746471 | 0.075191 | 0.867924 | -1.621678 | 0.771146 | -0.067286 | 0.557998 | -0.093593 | 0.020233 | -0.800013 | -0.629188 | 3 | 0 |
| 16 | -0.032353 | 1.227345 | -0.188580 | 0.927210 | 0.016663 | 1.001867 | -0.473811 | 0.782387 | 1.542760 | -0.345478 | -0.838104 | -0.439443 | 1.179204 | 4 | 0 |
| 17 | 0.459031 | 1.258961 | -0.329412 | 1.391790 | -0.208888 | 1.059241 | -1.245671 | 0.619153 | 0.245780 | 0.644548 | -0.602629 | -0.928581 | 0.739885 | 4 | 0 |
| 18 | -0.359172 | 0.051214 | -0.603962 | 0.778896 | 1.630471 | 1.802477 | 1.486205 | -0.140738 | -0.894366 | 0.736624 | 2.114721 | 1.078175 | -0.965785 | 4 | 0 |
| 19 | 0.209859 | -0.615399 | -0.676895 | 0.735655 | 0.805509 | -0.696793 | 1.073068 | 0.240429 | -0.205934 | -0.759693 | 0.672843 | 0.569482 | -0.455391 | 4 | 0 |
| 20 | 0.127381 | -0.265099 | -0.258801 | -0.127568 | 0.649447 | 0.244473 | 1.897421 | -0.344616 | -0.593159 | 0.065147 | 1.787607 | 1.219355 | -0.171813 | 4 | 0 |
| 21 | 1.222717 | 0.409860 | 1.311826 | 0.703873 | 0.322062 | 0.305461 | -0.522644 | -0.750833 | 0.001767 | 0.017953 | 0.254329 | -0.227762 | -0.614790 | 0 | 0 |
| 22 | 1.173352 | 0.490500 | 0.742825 | -0.028159 | -0.272396 | -0.502733 | -0.759443 | -1.031924 | -0.157975 | 0.075659 | 0.604220 | 0.143298 | -0.001849 | 0 | 0 |
| 23 | 1.069960 | 0.858822 | -0.795544 | 0.076688 | 0.851875 | 0.735014 | -0.758779 | 0.065595 | 0.532667 | -0.391858 | -0.497019 | 0.240822 | 0.848126 | 4 | 0 |
| 24 | 0.581377 | -0.804045 | 0.399887 | 1.535671 | 0.245878 | 0.904192 | -0.233991 | -0.925983 | 0.212280 | 0.499535 | -0.024926 | -0.925999 | 1.294925 | 4 | 0 |
| 25 | 0.161110 | 0.025075 | 0.716318 | 1.532230 | 0.889883 | 0.353167 | -0.058787 | -0.593046 | 0.093773 | 0.927085 | 0.199691 | -0.979872 | 0.232850 | 4 | 0 |
| 26 | 0.431443 | 0.442713 | 0.259120 | 0.045533 | 0.102675 | 0.367606 | 0.054320 | 0.942924 | 0.180609 | 0.550983 | 0.265291 | 0.321252 | -0.830969 | 4 | 0 |
| 27 | 0.344525 | -1.140315 | -0.725453 | -0.547965 | 0.449924 | 0.303904 | 1.053624 | 1.051712 | 0.509322 | 0.181611 | -0.519979 | -1.134490 | -1.439105 | 3 | 0 |
| 28 | -0.041565 | 0.671274 | 0.195143 | 0.247294 | 0.531620 | 1.050124 | 0.311358 | 0.988161 | -0.198869 | 0.387795 | 1.757366 | 1.351684 | 0.194840 | 4 | 0 |
| 29 | 0.417845 | -1.134173 | -0.760709 | -0.605264 | 0.077464 | 0.533333 | 1.104524 | 2.124971 | 0.083548 | 0.801730 | 0.092534 | -1.281628 | -1.468782 | 3 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 225 | 1.532114 | -1.060006 | -0.434145 | -0.999435 | -1.259462 | 0.039140 | -0.802013 | -0.655286 | 0.714448 | 1.005958 | -0.086372 | 0.537392 | 0.054440 | 1 | 1 |
| 226 | -0.942320 | 1.172080 | 0.506725 | -0.230675 | -0.104635 | 0.898742 | -1.107001 | -1.182148 | -0.940991 | 0.232366 | 1.778224 | 0.975251 | 1.731084 | 0 | 1 |
| 227 | 1.421974 | 0.631029 | -0.563813 | -0.694595 | -0.673270 | 0.929022 | 0.476907 | -1.025173 | -0.813644 | -0.060006 | -0.738730 | -0.558099 | 0.057654 | 1 | 1 |
| 228 | -1.473385 | -0.806223 | 1.849423 | -1.252541 | 0.941013 | -0.872947 | -1.812392 | -0.242718 | -0.097212 | -0.510500 | -0.232195 | -0.546399 | 0.945530 | 2 | 1 |
| 229 | -1.135926 | -0.772372 | 1.164844 | -1.022517 | 0.630202 | -0.496999 | -1.101656 | -0.168921 | -0.295159 | -0.587401 | 0.369033 | -0.266325 | 0.604469 | 2 | 1 |
| 230 | -1.085049 | 0.879566 | 0.442593 | 0.128917 | 0.393498 | 0.531555 | 0.392194 | 1.418515 | 0.891015 | -0.348926 | -0.756201 | -0.838584 | -0.015971 | 3 | 1 |
| 231 | -0.352258 | 0.556982 | 0.530520 | 0.443818 | 0.300921 | 0.032128 | -0.797384 | -0.573532 | 0.398084 | 0.328875 | -0.274964 | -1.300920 | 0.254456 | 0 | 1 |
| 232 | -1.190363 | 0.797356 | 0.758472 | 0.587917 | 0.890540 | 0.471925 | 0.105793 | 0.680721 | 0.230834 | -0.150709 | -0.816744 | -0.470618 | 0.371198 | 4 | 1 |
| 233 | -0.651003 | -0.586618 | 1.326854 | -0.451354 | 0.507113 | 0.165474 | -0.919675 | -0.448249 | -1.310940 | -1.372737 | 0.406029 | -1.414627 | -0.434858 | 2 | 1 |
| 234 | -1.459511 | -0.516281 | 1.631699 | -1.141842 | 0.584621 | -0.458541 | -1.428877 | -0.934556 | -0.216455 | -0.049794 | 0.095580 | 0.387068 | 0.693730 | 2 | 1 |
| 235 | -0.726984 | 0.702447 | 0.798069 | -0.320660 | 0.530902 | 1.019988 | 0.144995 | 0.207847 | 0.039592 | 0.220761 | 0.762941 | 0.575034 | 0.671517 | 4 | 1 |
| 236 | -0.300986 | -0.404923 | 0.715406 | 0.245380 | -0.427936 | -0.334843 | -0.228084 | -0.330898 | -0.674327 | 0.199560 | 0.827455 | 0.016433 | 0.866789 | 0 | 1 |
| 237 | -0.736244 | 0.088611 | 0.910051 | 0.437100 | 0.258256 | 0.363828 | -0.415290 | -0.717445 | -0.012727 | 0.436925 | -0.786954 | -1.217376 | 0.352825 | 2 | 1 |
| 238 | 0.610473 | -2.664315 | 1.303652 | -2.022376 | 1.500032 | -1.280926 | -1.249533 | 0.432111 | -0.768558 | 0.291156 | -0.092312 | 0.053770 | -0.401166 | 2 | 1 |
| 239 | -2.045424 | -2.954642 | 0.302601 | -0.868092 | -1.038134 | -1.230777 | 0.514329 | 0.057591 | -1.023895 | 0.275395 | -1.450282 | 0.386242 | 0.318763 | 2 | 1 |
| 240 | 0.329793 | -1.367570 | -1.454329 | -0.207924 | -0.723609 | -0.149025 | -0.085298 | -0.011595 | -0.240239 | -0.009120 | -0.325229 | -0.025722 | 0.114182 | 1 | 1 |
| 241 | -1.919591 | 1.382172 | -0.134161 | 0.837967 | -0.687780 | 0.944303 | -0.258652 | -0.742178 | 0.386031 | -1.178099 | -1.843543 | -0.710556 | -0.318561 | 3 | 1 |
| 242 | -2.087669 | 1.400006 | -0.494964 | 0.451717 | -0.759188 | 0.736625 | 0.133121 | -0.196031 | 1.121231 | 0.474128 | -0.345937 | -0.409324 | -0.442069 | 4 | 1 |
| 243 | -2.131652 | 0.439305 | -0.612226 | 0.854126 | -0.494550 | 0.825299 | 0.301373 | -0.018964 | 0.690556 | -0.078762 | -0.709495 | -0.075857 | -0.418656 | 4 | 1 |
| 244 | -1.611989 | -0.756403 | -0.410917 | 1.075909 | 0.297336 | -1.317576 | 1.115011 | -0.467065 | -0.768378 | 1.615499 | 1.611125 | -1.018782 | -1.798744 | 2 | 1 |
| 245 | -0.142010 | 0.000190 | -0.063461 | -0.506353 | -0.386942 | -0.256144 | 0.270621 | -1.497417 | 0.507892 | 0.456828 | -0.431169 | -0.978417 | 0.015849 | 2 | 1 |
| 246 | -1.263975 | -1.168117 | -1.396090 | -0.312016 | 1.862268 | 1.400290 | 0.646060 | -0.686864 | 0.418524 | -0.069926 | -0.653856 | -0.853617 | -0.106814 | 2 | 1 |
| 247 | -0.507700 | 0.899825 | 1.510153 | 1.083642 | 2.081451 | 0.589016 | 0.901321 | 0.658808 | 0.152596 | 0.176442 | -0.447633 | 0.287838 | 0.650479 | 4 | 1 |
| 248 | -0.159768 | 0.518093 | 2.197018 | 0.698491 | 0.476336 | -2.014255 | -1.614667 | -0.397282 | -1.781932 | -0.208894 | 1.650551 | -0.771436 | -0.987237 | 0 | 1 |
| 249 | -1.037899 | 1.016712 | 2.774230 | 0.665468 | -0.385673 | 0.587263 | -0.121609 | -0.331379 | 0.622484 | -0.387131 | -0.276584 | 0.218207 | 1.689216 | 0 | 1 |
| 250 | -0.526923 | -1.169944 | 0.474875 | -0.789231 | 0.369827 | -0.537003 | -1.089843 | -0.173366 | -0.023237 | -0.142334 | 0.740065 | 0.813114 | 0.872556 | 2 | 1 |
| 251 | -0.770856 | -1.024349 | -0.019140 | -0.097521 | 0.092703 | 0.369242 | -0.273901 | 0.190740 | -0.074032 | 0.113055 | 0.140291 | -0.696275 | 0.166679 | 2 | 1 |
| 252 | -0.905458 | -0.790575 | 0.206164 | -0.723816 | -0.444860 | 0.107833 | -0.734514 | -0.533865 | -0.634334 | 0.320526 | 0.088428 | -0.348210 | 0.347201 | 2 | 1 |
| 253 | -1.378235 | -0.338405 | 0.016815 | -0.394563 | 0.034043 | 1.023865 | -0.303960 | -1.316121 | 0.198697 | 0.670577 | 0.809574 | 0.580565 | 0.056004 | 2 | 1 |
| 254 | -0.199959 | -2.035812 | -0.904507 | -1.511975 | -0.437843 | 0.262972 | -1.943788 | -1.963300 | -2.256227 | 0.354369 | -0.039829 | 0.882325 | 0.139307 | 2 | 1 |
255 rows × 15 columns
# Count songs per (chosen, Cluster) pair, reshape to one row per cluster with
# one column per 'chosen' value, and draw the result as a stacked bar chart.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
# `values=0` refers to the unnamed count column produced by .size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
# Stack not-chosen (0) and chosen (1) counts on top of each other per cluster.
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82aec1940>
from IPython.display import display, Markdown, Latex
# Render the current company's name (index 2) as a Markdown section header.
display(Markdown('## '+companies[2]))
# Features: the standardized MFCC frame for this company, minus the K-means
# cluster label added earlier; target: whether the song was chosen.
X = df_n_ps_std_mfcc[2].drop(columns='Cluster')
y = df_n_ps[2]['chosen']
# Hold out the default 25% of songs for testing.  A fixed random_state makes
# the split -- and therefore every metric reported below -- reproducible
# across notebook runs (the later np.random.seed(1234) does not govern
# train_test_split's own RNG).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1234)
X_train.shape
(231, 13)
# Baseline MLP; the hidden_layer_sizes given here is only a placeholder --
# the grid search below overrides it through param_grid.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate hyperparameter values explored by the grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time()  # Returns the current time in seconds since Jan 1st 1970 (the epoch)
np.random.seed(1234)
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
             }
# Track both Cohen's kappa and plain accuracy per fold; refit the final
# model on the parameter set with the best mean accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# FIX: the deprecated `iid=True` argument was dropped -- it was removed in
# scikit-learn 0.24 and made this cell crash on current versions.  With
# equal-sized folds (cv=5) the fold-averaged scores are effectively the same.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # Time after the model's training has finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30,), 'learning_rate_init': 0.001, 'max_iter': 1000}, que permiten obtener un Accuracy de 84.42% y un Kappa del 45.84
Tiempo total: 27.81 minutos
# Recover the winning architecture from the grid search: input width, the
# tuned hidden layer widths, and a single output unit appended at the end.
n0 = X_train.shape[1]
best = grid.best_params_
### hidden_layer_sizes
ns = list(best['hidden_layer_sizes'])
ns.append(1)  # one output neuron for the binary 'chosen' target
lr = best['learning_rate_init']
epochs = best['max_iter']
# Rebuild the tuned MLP in Keras so training can be inspected epoch by epoch.
# FIX: the hidden activation was hard-coded to 'tanh', but the grid search
# selected a different activation ('relu' per the printed best_params_), so
# the Keras model did not replicate the tuned one.  Use the tuned activation,
# translating sklearn's 'logistic' to its Keras equivalent 'sigmoid'.
hidden_activation = {'logistic': 'sigmoid'}.get(grid.best_params_['activation'],
                                                grid.best_params_['activation'])
input_tensor = Input(shape = (n0,))
hidden_outputs = [input_tensor]
# Chain one Dense layer per tuned hidden width; ns[-1] is the output unit.
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation = hidden_activation)(hidden_outputs[i]))
# Sigmoid output to pair with the binary cross-entropy loss used below.
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly initialized weights so training can be restarted
# from the exact same starting point when the cell is re-run.
weights = model.get_weights()
model.summary()
Model: "model_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_3 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_8 (Dense) (None, 30) 420 _________________________________________________________________ dense_9 (Dense) (None, 1) 31 ================================================================= Total params: 451 Trainable params: 451 Non-trainable params: 0 _________________________________________________________________
# Reset to the saved initial weights so every run trains from scratch.
model.set_weights(weights)
# FIX: `learning_rate` replaces the long-deprecated `lr` keyword (removed in
# recent Keras releases); the value is the tuned learning_rate_init.
adam = keras.optimizers.Adam(learning_rate=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever val_accuracy fails to improve by at least
# 0.01 for 10 consecutive epochs.
# NOTE(review): the test split doubles as the validation set here, so the
# val_accuracy-driven LR schedule leaks test information into training --
# consider carving a separate validation split out of the training data.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                    )
Train on 231 samples, validate on 78 samples Epoch 1/1000 231/231 [==============================] - 0s 1ms/step - loss: 0.8119 - accuracy: 0.4589 - val_loss: 0.7457 - val_accuracy: 0.5385 Epoch 2/1000 231/231 [==============================] - 0s 126us/step - loss: 0.7766 - accuracy: 0.4935 - val_loss: 0.7248 - val_accuracy: 0.5513 Epoch 3/1000 231/231 [==============================] - 0s 61us/step - loss: 0.7467 - accuracy: 0.5411 - val_loss: 0.7062 - val_accuracy: 0.5385 Epoch 4/1000 231/231 [==============================] - 0s 52us/step - loss: 0.7166 - accuracy: 0.5931 - val_loss: 0.6883 - val_accuracy: 0.5513 Epoch 5/1000 231/231 [==============================] - 0s 56us/step - loss: 0.6911 - accuracy: 0.6147 - val_loss: 0.6739 - val_accuracy: 0.5641 Epoch 6/1000 231/231 [==============================] - 0s 56us/step - loss: 0.6673 - accuracy: 0.6234 - val_loss: 0.6607 - val_accuracy: 0.5897 Epoch 7/1000 231/231 [==============================] - 0s 78us/step - loss: 0.6460 - accuracy: 0.6364 - val_loss: 0.6480 - val_accuracy: 0.5897 Epoch 8/1000 231/231 [==============================] - 0s 56us/step - loss: 0.6258 - accuracy: 0.6623 - val_loss: 0.6371 - val_accuracy: 0.6154 Epoch 9/1000 231/231 [==============================] - 0s 74us/step - loss: 0.6079 - accuracy: 0.6883 - val_loss: 0.6269 - val_accuracy: 0.6667 Epoch 10/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5915 - accuracy: 0.7229 - val_loss: 0.6189 - val_accuracy: 0.6923 Epoch 11/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5768 - accuracy: 0.7403 - val_loss: 0.6096 - val_accuracy: 0.7051 Epoch 12/1000 231/231 [==============================] - 0s 56us/step - loss: 0.5636 - accuracy: 0.7532 - val_loss: 0.6025 - val_accuracy: 0.7308 Epoch 13/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5516 - accuracy: 0.7662 - val_loss: 0.5950 - val_accuracy: 0.7564 Epoch 14/1000 231/231 [==============================] - 0s 
56us/step - loss: 0.5405 - accuracy: 0.7619 - val_loss: 0.5887 - val_accuracy: 0.7692 Epoch 15/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5306 - accuracy: 0.7706 - val_loss: 0.5840 - val_accuracy: 0.7564 Epoch 16/1000 231/231 [==============================] - 0s 91us/step - loss: 0.5200 - accuracy: 0.7835 - val_loss: 0.5799 - val_accuracy: 0.7564 Epoch 17/1000 231/231 [==============================] - 0s 65us/step - loss: 0.5108 - accuracy: 0.7879 - val_loss: 0.5743 - val_accuracy: 0.7436 Epoch 18/1000 231/231 [==============================] - 0s 91us/step - loss: 0.5024 - accuracy: 0.7965 - val_loss: 0.5700 - val_accuracy: 0.7179 Epoch 19/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4944 - accuracy: 0.8139 - val_loss: 0.5660 - val_accuracy: 0.7051 Epoch 20/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4864 - accuracy: 0.8139 - val_loss: 0.5606 - val_accuracy: 0.7179 Epoch 21/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4797 - accuracy: 0.8139 - val_loss: 0.5552 - val_accuracy: 0.7179 Epoch 22/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4734 - accuracy: 0.8182 - val_loss: 0.5507 - val_accuracy: 0.7308 Epoch 23/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4674 - accuracy: 0.8225 - val_loss: 0.5486 - val_accuracy: 0.7308 Epoch 24/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4615 - accuracy: 0.8225 - val_loss: 0.5479 - val_accuracy: 0.7436 Epoch 00024: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257. 
Epoch 25/1000 231/231 [==============================] - ETA: 0s - loss: 0.4421 - accuracy: 0.84 - 0s 56us/step - loss: 0.4574 - accuracy: 0.8182 - val_loss: 0.5472 - val_accuracy: 0.7436 Epoch 26/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4552 - accuracy: 0.8182 - val_loss: 0.5465 - val_accuracy: 0.7564 Epoch 27/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4527 - accuracy: 0.8139 - val_loss: 0.5450 - val_accuracy: 0.7564 Epoch 28/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4502 - accuracy: 0.8139 - val_loss: 0.5441 - val_accuracy: 0.7564 Epoch 29/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4479 - accuracy: 0.8182 - val_loss: 0.5429 - val_accuracy: 0.7564 Epoch 30/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4459 - accuracy: 0.8182 - val_loss: 0.5424 - val_accuracy: 0.7436 Epoch 31/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4439 - accuracy: 0.8182 - val_loss: 0.5413 - val_accuracy: 0.7436 Epoch 32/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4419 - accuracy: 0.8182 - val_loss: 0.5407 - val_accuracy: 0.7436 Epoch 33/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4402 - accuracy: 0.8139 - val_loss: 0.5400 - val_accuracy: 0.7436 Epoch 34/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4385 - accuracy: 0.8139 - val_loss: 0.5398 - val_accuracy: 0.7436 Epoch 00034: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628. 
Epoch 35/1000 231/231 [==============================] - 0s 52us/step - loss: 0.4370 - accuracy: 0.8139 - val_loss: 0.5392 - val_accuracy: 0.7436 Epoch 36/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4361 - accuracy: 0.8182 - val_loss: 0.5391 - val_accuracy: 0.7436 Epoch 37/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4353 - accuracy: 0.8182 - val_loss: 0.5390 - val_accuracy: 0.7436 Epoch 38/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4346 - accuracy: 0.8182 - val_loss: 0.5384 - val_accuracy: 0.7436 Epoch 39/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4337 - accuracy: 0.8182 - val_loss: 0.5387 - val_accuracy: 0.7436 Epoch 40/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4329 - accuracy: 0.8182 - val_loss: 0.5382 - val_accuracy: 0.7436 Epoch 41/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4320 - accuracy: 0.8182 - val_loss: 0.5377 - val_accuracy: 0.7436 Epoch 42/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4311 - accuracy: 0.8139 - val_loss: 0.5370 - val_accuracy: 0.7436 Epoch 43/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4304 - accuracy: 0.8095 - val_loss: 0.5363 - val_accuracy: 0.7436 Epoch 44/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4298 - accuracy: 0.8095 - val_loss: 0.5358 - val_accuracy: 0.7436 Epoch 00044: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814. 
Epoch 45/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4291 - accuracy: 0.8139 - val_loss: 0.5355 - val_accuracy: 0.7436 Epoch 46/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4286 - accuracy: 0.8139 - val_loss: 0.5353 - val_accuracy: 0.7564 Epoch 47/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4282 - accuracy: 0.8182 - val_loss: 0.5353 - val_accuracy: 0.7436 Epoch 48/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4279 - accuracy: 0.8182 - val_loss: 0.5352 - val_accuracy: 0.7564 Epoch 49/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4275 - accuracy: 0.8182 - val_loss: 0.5350 - val_accuracy: 0.7436 Epoch 50/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4271 - accuracy: 0.8182 - val_loss: 0.5351 - val_accuracy: 0.7564 Epoch 51/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4267 - accuracy: 0.8182 - val_loss: 0.5350 - val_accuracy: 0.7436 Epoch 52/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4265 - accuracy: 0.8182 - val_loss: 0.5350 - val_accuracy: 0.7436 Epoch 53/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4261 - accuracy: 0.8182 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 54/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4258 - accuracy: 0.8182 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 00054: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05. 
Epoch 55/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4255 - accuracy: 0.8182 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 56/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4253 - accuracy: 0.8182 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 57/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4251 - accuracy: 0.8182 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 58/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4250 - accuracy: 0.8182 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 59/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4248 - accuracy: 0.8182 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 60/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4246 - accuracy: 0.8182 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 61/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4244 - accuracy: 0.8182 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 62/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4243 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 63/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4241 - accuracy: 0.8139 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 64/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4240 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 00064: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05. 
Epoch 65/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4238 - accuracy: 0.8182 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 66/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4237 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 67/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4236 - accuracy: 0.8139 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 68/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4235 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 69/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4235 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 70/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4234 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 71/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4233 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 72/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4232 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 73/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4232 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 74/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4231 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 00074: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05. 
Epoch 75/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4230 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 76/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4230 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 77/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4229 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 78/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4229 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 79/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4228 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 80/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4228 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 81/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4227 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 82/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4227 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 83/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4227 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 84/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4226 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 00084: ReduceLROnPlateau reducing learning rate to 7.812500371073838e-06. 
Epoch 85/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4226 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 86/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 87/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 88/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 89/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 90/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 91/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 92/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 93/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 94/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 00094: ReduceLROnPlateau reducing learning rate to 3.906250185536919e-06. 
Epoch 95/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 96/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 97/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 98/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 99/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 100/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 101/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 102/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 103/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 104/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 00104: ReduceLROnPlateau reducing learning rate to 1.9531250927684596e-06. 
Epoch 105/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 106/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 107/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 108/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 109/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 110/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 111/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 112/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 113/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 114/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00114: ReduceLROnPlateau reducing learning rate to 9.765625463842298e-07. 
Epoch 115/1000 231/231 [==============================] - 0s 130us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 116/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 117/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 118/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 119/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 120/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 121/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 122/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 123/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 124/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00124: ReduceLROnPlateau reducing learning rate to 4.882812731921149e-07. 
Epoch 125/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 126/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 127/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 128/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 129/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 130/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 131/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 132/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 133/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 134/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00134: ReduceLROnPlateau reducing learning rate to 2.4414063659605745e-07. 
Epoch 135/1000 231/231 [==============================] - 0s 134us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 136/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 137/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 138/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 139/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 140/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 141/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 142/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 143/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 144/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00144: ReduceLROnPlateau reducing learning rate to 1.2207031829802872e-07. 
Epoch 145/1000 231/231 [==============================] - 0s 1ms/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 146/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 147/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 148/1000 231/231 [==============================] - 0s 114us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 149/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 150/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 151/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 152/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 153/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 154/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00154: ReduceLROnPlateau reducing learning rate to 6.103515914901436e-08. 
Epoch 155/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 156/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 157/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 158/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 159/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 160/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 161/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 162/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 163/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 164/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00164: ReduceLROnPlateau reducing learning rate to 3.051757957450718e-08. 
Epoch 165/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 166/1000 231/231 [==============================] - 0s 143us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 167/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 168/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 169/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 170/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 171/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 172/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 173/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 174/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00174: ReduceLROnPlateau reducing learning rate to 1.525878978725359e-08. 
Epoch 175/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 176/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 177/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 178/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 179/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 180/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 181/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 182/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 183/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 184/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00184: ReduceLROnPlateau reducing learning rate to 7.629394893626795e-09. 
Epoch 185/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 186/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 187/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 188/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 189/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 190/1000 231/231 [==============================] - 0s 125us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 191/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 192/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 193/1000 231/231 [==============================] - ETA: 0s - loss: 0.3107 - accuracy: 0.90 - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 194/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00194: ReduceLROnPlateau reducing learning rate to 3.814697446813398e-09. 
Epoch 195/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 196/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 197/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 198/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 199/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 200/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 201/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 202/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 203/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 204/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00204: ReduceLROnPlateau reducing learning rate to 1.907348723406699e-09. 
Epoch 205/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 206/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 207/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 208/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 209/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 210/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 211/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 212/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 213/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 214/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00214: ReduceLROnPlateau reducing learning rate to 9.536743617033494e-10. 
Epoch 215/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 216/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 217/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 218/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 219/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 220/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 221/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 222/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 223/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 224/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00224: ReduceLROnPlateau reducing learning rate to 4.768371808516747e-10. 
Epoch 225/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 226/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 227/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 228/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 229/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 230/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 231/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 232/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 233/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 234/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00234: ReduceLROnPlateau reducing learning rate to 2.3841859042583735e-10. 
Epoch 235/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 236/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 237/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 238/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 239/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 240/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 241/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 242/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 243/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 244/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00244: ReduceLROnPlateau reducing learning rate to 1.1920929521291868e-10. 
Epoch 245/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 246/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 247/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 248/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 249/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 250/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 251/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 252/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 253/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 254/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00254: ReduceLROnPlateau reducing learning rate to 5.960464760645934e-11. 
Epoch 255/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 256/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 257/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 258/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 259/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 260/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 261/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 262/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 263/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 264/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00264: ReduceLROnPlateau reducing learning rate to 2.980232380322967e-11. 
Epoch 265/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 266/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 267/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 268/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 269/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 270/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 271/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 272/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 273/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 274/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00274: ReduceLROnPlateau reducing learning rate to 1.4901161901614834e-11. 
Epoch 275/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 276/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 277/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 278/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 279/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 280/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 281/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 282/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 283/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 284/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00284: ReduceLROnPlateau reducing learning rate to 7.450580950807417e-12. 
Epoch 285/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 286/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 287/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 288/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 289/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 290/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 291/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 292/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 293/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 294/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00294: ReduceLROnPlateau reducing learning rate to 3.725290475403709e-12. 
Epoch 295/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 296/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 297/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 298/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 299/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 300/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 301/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 302/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 303/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 304/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00304: ReduceLROnPlateau reducing learning rate to 1.8626452377018543e-12. 
Epoch 305/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 306/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 307/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 308/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 309/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 310/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 311/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 312/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 313/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 314/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00314: ReduceLROnPlateau reducing learning rate to 9.313226188509272e-13. 
Epoch 315/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 316/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 317/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 318/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 319/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 320/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 321/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 322/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 323/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 324/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00324: ReduceLROnPlateau reducing learning rate to 4.656613094254636e-13. 
Epoch 325/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 326/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 327/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 328/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 329/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 330/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 331/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 332/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 333/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 334/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00334: ReduceLROnPlateau reducing learning rate to 2.328306547127318e-13. 
Epoch 335/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 336/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 337/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 338/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 339/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 340/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 341/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 342/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 343/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 344/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00344: ReduceLROnPlateau reducing learning rate to 1.164153273563659e-13. 
Epoch 345/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 346/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 347/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 348/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 349/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 350/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 351/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 352/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 353/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 354/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00354: ReduceLROnPlateau reducing learning rate to 5.820766367818295e-14. 
Epoch 355/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 356/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 357/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 358/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 359/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 360/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 361/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 362/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 363/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 364/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00364: ReduceLROnPlateau reducing learning rate to 2.9103831839091474e-14. 
Epoch 365/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 366/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 367/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 368/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 369/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 370/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 371/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 372/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 373/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 374/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00374: ReduceLROnPlateau reducing learning rate to 1.4551915919545737e-14. 
Epoch 375/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 376/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 377/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 378/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 379/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 380/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 381/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 382/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 383/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 384/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00384: ReduceLROnPlateau reducing learning rate to 7.275957959772868e-15. 
Epoch 385/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 386/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 387/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 388/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 389/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 390/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 391/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 392/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 393/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 394/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00394: ReduceLROnPlateau reducing learning rate to 3.637978979886434e-15. 
Epoch 395/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 396/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 397/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 398/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 399/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 400/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 401/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 402/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 403/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 404/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00404: ReduceLROnPlateau reducing learning rate to 1.818989489943217e-15. 
Epoch 405/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 406/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 407/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 408/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 409/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 410/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 411/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 412/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 413/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 414/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00414: ReduceLROnPlateau reducing learning rate to 9.094947449716085e-16. 
Epoch 415/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 416/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 417/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 418/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 419/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 420/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 421/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 422/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 423/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 424/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00424: ReduceLROnPlateau reducing learning rate to 4.547473724858043e-16. 
Epoch 425/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 426/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 427/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 428/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 429/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 430/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 431/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 432/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 433/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 434/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00434: ReduceLROnPlateau reducing learning rate to 2.2737368624290214e-16. 
Epoch 435/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 436/1000 231/231 [==============================] - ETA: 0s - loss: 0.6507 - accuracy: 0.62 - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 437/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 438/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 439/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 440/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 441/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 442/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 443/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 444/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00444: ReduceLROnPlateau reducing learning rate to 1.1368684312145107e-16. 
Epoch 445/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 446/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 447/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 448/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 449/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 450/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 451/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 452/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 453/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 454/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00454: ReduceLROnPlateau reducing learning rate to 5.684342156072553e-17. 
Epoch 455/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 456/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 457/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 458/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 459/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 460/1000 231/231 [==============================] - 0s 203us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 461/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 462/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 463/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 464/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00464: ReduceLROnPlateau reducing learning rate to 2.842171078036277e-17. 
Epoch 465/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 466/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 467/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 468/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 469/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 470/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 471/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 472/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 473/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 474/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00474: ReduceLROnPlateau reducing learning rate to 1.4210855390181384e-17. 
Epoch 475/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 476/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 477/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 478/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 479/1000 231/231 [==============================] - 0s 125us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 480/1000 231/231 [==============================] - ETA: 0s - loss: 0.4785 - accuracy: 0.78 - 0s 143us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 481/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 482/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 483/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 484/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00484: ReduceLROnPlateau reducing learning rate to 7.105427695090692e-18. 
Epoch 485/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 486/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 487/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 488/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 489/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 490/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 491/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 492/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 493/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 494/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00494: ReduceLROnPlateau reducing learning rate to 3.552713847545346e-18. 
Epoch 495/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 496/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 497/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 498/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 499/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 500/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 501/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 502/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 503/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 504/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00504: ReduceLROnPlateau reducing learning rate to 1.776356923772673e-18. 
Epoch 505/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 506/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 507/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 508/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 509/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 510/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 511/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 512/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 513/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 514/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00514: ReduceLROnPlateau reducing learning rate to 8.881784618863365e-19. 
Epoch 515/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 516/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 517/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 518/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 519/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 520/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 521/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 522/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 523/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 524/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00524: ReduceLROnPlateau reducing learning rate to 4.440892309431682e-19. 
Epoch 525/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 526/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 527/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 528/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 529/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 530/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 531/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 532/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 533/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 534/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00534: ReduceLROnPlateau reducing learning rate to 2.220446154715841e-19. 
Epoch 535/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 536/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 537/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 538/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 539/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 540/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 541/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 542/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 543/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 544/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00544: ReduceLROnPlateau reducing learning rate to 1.1102230773579206e-19. 
Epoch 545/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 546/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 547/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 548/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 549/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 550/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 551/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 552/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 553/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 554/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00554: ReduceLROnPlateau reducing learning rate to 5.551115386789603e-20. 
Epoch 555/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 556/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 557/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 558/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 559/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 560/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 561/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 562/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 563/1000 231/231 [==============================] - 0s 125us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 564/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00564: ReduceLROnPlateau reducing learning rate to 2.7755576933948015e-20. 
Epoch 565/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 566/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 567/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 568/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 569/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 570/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 571/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 572/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 573/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 574/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00574: ReduceLROnPlateau reducing learning rate to 1.3877788466974007e-20. 
Epoch 575/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 576/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 577/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 578/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 579/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 580/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 581/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 582/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 583/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 584/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00584: ReduceLROnPlateau reducing learning rate to 6.938894233487004e-21. 
Epoch 585/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 586/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 587/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 588/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 589/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 590/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 591/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 592/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 593/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 594/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00594: ReduceLROnPlateau reducing learning rate to 3.469447116743502e-21. 
Epoch 595/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 596/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 597/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 598/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 599/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 600/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 601/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 602/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 603/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 604/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00604: ReduceLROnPlateau reducing learning rate to 1.734723558371751e-21. 
Epoch 605/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 606/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 607/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 608/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 609/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 610/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 611/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 612/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 613/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 614/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00614: ReduceLROnPlateau reducing learning rate to 8.673617791858755e-22. 
Epoch 615/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 616/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 617/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 618/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 619/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 620/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 621/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 622/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 623/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 624/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00624: ReduceLROnPlateau reducing learning rate to 4.336808895929377e-22. 
Epoch 625/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 626/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 627/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 628/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 629/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 630/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 631/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 632/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 633/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 634/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00634: ReduceLROnPlateau reducing learning rate to 2.1684044479646887e-22. 
Epoch 635/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 636/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 637/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 638/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 639/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 640/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 641/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 642/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 643/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 644/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00644: ReduceLROnPlateau reducing learning rate to 1.0842022239823443e-22. 
Epoch 645/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 646/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 647/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 648/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 649/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 650/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 651/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 652/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 653/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 654/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00654: ReduceLROnPlateau reducing learning rate to 5.421011119911722e-23. 
Epoch 655/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 656/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 657/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 658/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 659/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 660/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 661/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 662/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 663/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 664/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00664: ReduceLROnPlateau reducing learning rate to 2.710505559955861e-23. 
Epoch 665/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 666/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 667/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 668/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 669/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 670/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 671/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 672/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 673/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 674/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00674: ReduceLROnPlateau reducing learning rate to 1.3552527799779304e-23. 
Epoch 675/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 676/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 677/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 678/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 679/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 680/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 681/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 682/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 683/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 684/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00684: ReduceLROnPlateau reducing learning rate to 6.776263899889652e-24. 
Epoch 685/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 686/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 687/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 688/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 689/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 690/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 691/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 692/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 693/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 694/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00694: ReduceLROnPlateau reducing learning rate to 3.388131949944826e-24. 
Epoch 695/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 696/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 697/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 698/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 699/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 700/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 701/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 702/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 703/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 704/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00704: ReduceLROnPlateau reducing learning rate to 1.694065974972413e-24. 
Epoch 705/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 706/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 707/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 708/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 709/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 710/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 711/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 712/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 713/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 714/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00714: ReduceLROnPlateau reducing learning rate to 8.470329874862065e-25. 
Epoch 715/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 716/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 717/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 718/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 719/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 720/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 721/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 722/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 723/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 724/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00724: ReduceLROnPlateau reducing learning rate to 4.2351649374310325e-25. 
Epoch 725/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 726/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 727/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 728/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 729/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 730/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 731/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 732/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 733/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 734/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00734: ReduceLROnPlateau reducing learning rate to 2.1175824687155163e-25. 
Epoch 735/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 736/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 737/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 738/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 739/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 740/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 741/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 742/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 743/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 744/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00744: ReduceLROnPlateau reducing learning rate to 1.0587912343577581e-25. 
Epoch 745/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 746/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 747/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 748/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 749/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 750/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 751/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 752/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 753/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 754/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00754: ReduceLROnPlateau reducing learning rate to 5.293956171788791e-26. 
Epoch 755/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 756/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 757/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 758/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 759/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 760/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 761/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 762/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 763/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 764/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00764: ReduceLROnPlateau reducing learning rate to 2.6469780858943953e-26. 
Epoch 765/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 766/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 767/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 768/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 769/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 770/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 771/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 772/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 773/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 774/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00774: ReduceLROnPlateau reducing learning rate to 1.3234890429471977e-26. 
Epoch 775/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 776/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 777/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 778/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 779/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 780/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 781/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 782/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 783/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 784/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00784: ReduceLROnPlateau reducing learning rate to 6.617445214735988e-27. 
Epoch 785/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 786/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 787/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 788/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 789/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 790/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 791/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 792/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 793/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 794/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00794: ReduceLROnPlateau reducing learning rate to 3.308722607367994e-27. 
Epoch 795/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 796/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 797/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 798/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 799/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 800/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 801/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 802/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 803/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 804/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00804: ReduceLROnPlateau reducing learning rate to 1.654361303683997e-27. 
Epoch 805/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 806/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 807/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 808/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 809/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 810/1000 231/231 [==============================] - 0s 130us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 811/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 812/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 813/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 814/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00814: ReduceLROnPlateau reducing learning rate to 8.271806518419985e-28. 
Epoch 815/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 816/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 817/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 818/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 819/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 820/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 821/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 822/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 823/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 824/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00824: ReduceLROnPlateau reducing learning rate to 4.135903259209993e-28. 
Epoch 825/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 826/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 827/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 828/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 829/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 830/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 831/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 832/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 833/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 834/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00834: ReduceLROnPlateau reducing learning rate to 2.0679516296049964e-28. 
Epoch 835/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 836/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 837/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 838/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 839/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 840/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 841/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 842/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 843/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 844/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00844: ReduceLROnPlateau reducing learning rate to 1.0339758148024982e-28. 
Epoch 845/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 846/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 847/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 848/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 849/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 850/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 851/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 852/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 853/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 854/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00854: ReduceLROnPlateau reducing learning rate to 5.169879074012491e-29. 
Epoch 855/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 856/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 857/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 858/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 859/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 860/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 861/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 862/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 863/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 864/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00864: ReduceLROnPlateau reducing learning rate to 2.5849395370062454e-29. 
Epoch 865/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 866/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 867/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 868/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 869/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 870/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 871/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 872/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 873/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 874/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00874: ReduceLROnPlateau reducing learning rate to 1.2924697685031227e-29. 
Epoch 875/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 876/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 877/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 878/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 879/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 880/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 881/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 882/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 883/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 884/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00884: ReduceLROnPlateau reducing learning rate to 6.462348842515614e-30. 
Epoch 885/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 886/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 887/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 888/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 889/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 890/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 891/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 892/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 893/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 894/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00894: ReduceLROnPlateau reducing learning rate to 3.231174421257807e-30. 
Epoch 895/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 896/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 897/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 898/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 899/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 900/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 901/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 902/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 903/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 904/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00904: ReduceLROnPlateau reducing learning rate to 1.6155872106289034e-30. 
Epoch 905/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 906/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 907/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 908/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 909/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 910/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 911/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 912/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 913/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 914/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00914: ReduceLROnPlateau reducing learning rate to 8.077936053144517e-31. 
Epoch 915/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 916/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 917/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 918/1000 231/231 [==============================] - 0s 130us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 919/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 920/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 921/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 922/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 923/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 924/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00924: ReduceLROnPlateau reducing learning rate to 4.0389680265722585e-31. 
Epoch 925/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 926/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 927/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 928/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 929/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 930/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 931/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 932/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 933/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 934/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00934: ReduceLROnPlateau reducing learning rate to 2.0194840132861292e-31. 
Epoch 935/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 936/1000 231/231 [==============================] - 0s 143us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 937/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 938/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 939/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 940/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 941/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 942/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 943/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 944/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00944: ReduceLROnPlateau reducing learning rate to 1.0097420066430646e-31. 
Epoch 945/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 946/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 947/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 948/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 949/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 950/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 951/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 952/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 953/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 954/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00954: ReduceLROnPlateau reducing learning rate to 5.048710033215323e-32. 
Epoch 955/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 956/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 957/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 958/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 959/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 960/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 961/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 962/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 963/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 964/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00964: ReduceLROnPlateau reducing learning rate to 2.5243550166076616e-32. 
Epoch 965/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 966/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 967/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 968/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 969/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 970/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 971/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 972/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 973/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 974/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00974: ReduceLROnPlateau reducing learning rate to 1.2621775083038308e-32. 
Epoch 975/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 976/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 977/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 978/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 979/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 980/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 981/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 982/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 983/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 984/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00984: ReduceLROnPlateau reducing learning rate to 6.310887541519154e-33. 
Epoch 985/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 986/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 987/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 988/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 989/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 990/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 991/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 992/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 993/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 994/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00994: ReduceLROnPlateau reducing learning rate to 3.155443770759577e-33. 
Epoch 995/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 996/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 997/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 998/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 999/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 1000/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436
# Plot the training/validation accuracy and loss curves recorded by Keras
# during model.fit(). Assumes `history` (a keras History object) and `plt`
# (matplotlib.pyplot) are defined in earlier cells.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
# One x-value per recorded epoch.
epochs = range(len(acc))
# Report how many epochs were recorded instead of printing a bare range
# object (which rendered as an unhelpful `range(0, 2000)` in the output).
print("epochs recorded:", len(acc))

# Accuracy curves: dots for training, solid line for validation.
plt.figure()
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Loss curves on a separate figure.
plt.figure()
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 2000)
# Evaluate the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
78/78 [==============================] - 0s 77us/step test loss: 0.5342327150014731, test accuracy: 0.7435897588729858
# Score the test set with the raw sigmoid outputs; ROC AUC is
# threshold-independent, so the continuous scores are used directly.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.655664585191793
# Binarize the sigmoid outputs at the conventional 0.5 threshold.
# Vectorized instead of the original `int(i >= 0.5)` per element:
# model.predict returns a 2-D (n, 1) array, so each `i` is a size-1 array
# and int() on it relies on deprecated implicit array-to-scalar conversion
# (an error in modern NumPy). ravel + tolist keeps y_pred a flat list of
# ints, as before.
y_pred = (np.asarray(y_pred) >= 0.5).astype(int).ravel().tolist()
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.07253269916765748
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.784459 | 0.109342 | 0.681608 | 1.151904 | -0.707724 | 0.736365 | 0.241404 | -0.461605 | 1.084621 | 0.123497 | -0.150398 | 1.784532 | 0.824544 |
| 1 | 0.213444 | 0.453851 | 0.215705 | 0.987439 | -1.851748 | -0.001814 | -0.218203 | 0.548263 | -0.521851 | 1.253720 | 0.882518 | -0.892913 | 0.218184 |
| 2 | 1.478029 | 0.664210 | 0.484232 | 0.450284 | -0.427587 | -0.448079 | -0.195272 | -0.610679 | -0.675633 | 0.759098 | -0.107303 | -0.440790 | -1.398093 |
| 3 | -0.846386 | -0.742706 | -0.734786 | -0.199585 | -0.328948 | -0.230911 | 0.620910 | 0.911236 | 1.274407 | 0.150882 | -0.603865 | -0.919849 | 0.386069 |
| 4 | -0.952033 | -0.794706 | -1.141199 | -0.070979 | 0.454453 | 0.544600 | 1.427005 | 1.918539 | 1.204102 | 0.076961 | -0.328712 | -1.051776 | -0.151007 |
| 5 | -1.244473 | 2.250723 | 2.321007 | 0.549219 | 1.971653 | -1.427849 | 0.392314 | -1.015093 | 0.157566 | 0.486970 | -0.455070 | 0.229936 | 0.185742 |
| 6 | -0.128652 | 0.958605 | 1.191477 | 0.705974 | 0.793937 | -0.709130 | -0.221572 | 0.922219 | 0.500475 | -0.571099 | 0.521526 | 0.567361 | -0.486761 |
| 7 | -1.148662 | 0.562286 | 0.636059 | 0.222732 | 0.741470 | 0.009108 | -0.949365 | -0.486631 | -0.519627 | -0.739600 | -0.415884 | -0.026850 | 0.290685 |
| 8 | -0.440190 | 0.461355 | -0.016542 | -0.158636 | 0.985626 | -0.417310 | 0.182134 | 0.290631 | 0.379949 | 0.069159 | 1.080013 | 0.220566 | -0.042505 |
| 9 | -0.695730 | 0.398404 | 1.069978 | -0.124019 | 0.736597 | -0.912452 | 0.673156 | 0.604840 | 0.175505 | 0.496158 | 0.541788 | 0.649837 | -0.680054 |
| 10 | -0.006662 | 0.163832 | 1.373872 | -0.095120 | 1.621755 | 1.048509 | 0.997122 | 0.721763 | 0.660834 | -1.076324 | 0.925997 | -0.147393 | -0.420465 |
| 11 | -0.771763 | -0.484525 | -0.874411 | 0.647747 | -1.241650 | 0.190918 | 0.457290 | 0.915208 | 1.999689 | 1.879761 | 0.491598 | -0.164372 | -0.560754 |
| 12 | 0.140770 | 1.869847 | -1.926303 | -2.491201 | -2.679759 | -1.527330 | -0.299345 | -0.550878 | 0.702947 | 0.143961 | 0.034796 | -0.379551 | -0.422354 |
| 13 | -1.952477 | -0.949813 | 0.063314 | 1.188657 | 1.059601 | 1.221319 | 0.070346 | 2.284107 | 2.889527 | 2.012105 | 1.053494 | -0.178905 | -2.004333 |
| 14 | -0.895529 | 0.398850 | -0.469782 | 1.216393 | 0.657294 | -0.550619 | -0.854637 | -0.815454 | 1.929689 | 1.499328 | -0.096775 | -0.174183 | -1.119396 |
| 15 | -1.161372 | 1.475106 | 1.486594 | 0.127516 | 0.213940 | 0.587080 | -0.789652 | 0.130203 | 1.199389 | 1.458358 | 0.404206 | 0.754289 | -0.784214 |
| 16 | -0.476792 | 2.179287 | 0.101035 | -1.393755 | -0.740834 | 0.589666 | 0.873850 | 0.630539 | 0.535702 | 0.387326 | -0.979677 | 0.259755 | 0.313358 |
| 17 | -0.089088 | -0.841832 | 0.674093 | -0.842623 | 0.904577 | -1.476862 | 1.853427 | -1.108621 | 0.720923 | 0.383320 | -1.842030 | 1.712321 | -1.612726 |
| 18 | -1.772732 | 0.488101 | 0.057829 | 0.041074 | 0.732429 | 1.052187 | 0.279830 | -0.350521 | -0.476338 | -0.833438 | 0.184849 | -0.055428 | 0.627307 |
| 19 | -0.640351 | 0.068493 | 0.619966 | -0.599171 | 0.860806 | -0.385120 | 1.955087 | -1.014740 | 1.224043 | 1.450896 | -2.604448 | 2.187869 | -0.464774 |
| 20 | 0.590240 | 0.699904 | -0.097902 | 0.127319 | -0.882999 | 0.319144 | -0.146142 | -0.540616 | 0.300593 | 0.688863 | 0.314647 | 0.709538 | 0.572811 |
| 21 | 0.500240 | 0.875222 | -0.833826 | 0.377484 | 0.023480 | 1.321472 | 1.094037 | 0.734507 | 0.141947 | 0.214524 | 0.508556 | -0.265911 | -0.372316 |
| 22 | -0.076653 | 0.518030 | 0.003390 | 0.452969 | -0.218736 | 0.115409 | 0.332618 | 0.611098 | 0.211893 | -0.206368 | 0.358363 | 0.614915 | 0.518172 |
| 23 | 0.010763 | -0.352873 | -0.460051 | 0.423968 | -0.228393 | -0.040296 | -0.740869 | -0.810034 | -1.379366 | -0.179024 | 0.147810 | -0.224826 | 0.615011 |
| 24 | 0.874600 | 0.173728 | -1.041125 | 0.845285 | 1.139221 | 0.264458 | -0.378878 | 0.430226 | -0.568469 | -1.237333 | 0.032074 | 0.812111 | 0.431460 |
| 25 | 0.200637 | 0.337376 | 0.022126 | 1.189135 | -0.210135 | -1.195492 | 0.067874 | 1.349711 | -0.534365 | -0.132754 | 0.055132 | 0.239009 | -0.275633 |
| 26 | 0.362627 | 0.159292 | -1.211688 | -0.555502 | 0.107540 | 0.797027 | -0.246321 | -1.113565 | -1.373054 | -2.369077 | -0.539483 | 1.032005 | 1.637730 |
| 27 | -0.504648 | -0.561515 | -2.173809 | -1.525691 | -0.810132 | -0.617474 | 0.441103 | 1.146056 | 1.464488 | -1.111032 | -0.742722 | 0.034623 | 0.200147 |
| 28 | -0.339646 | -2.140319 | -1.409226 | -0.207553 | -1.216547 | -1.135346 | -0.831817 | 1.136334 | -0.187159 | 1.388841 | 0.282573 | -0.807850 | -0.371992 |
| 29 | -1.279089 | 1.555887 | 0.890503 | 2.134195 | 0.337580 | -0.037382 | -2.046955 | -2.888113 | 1.329665 | 1.436687 | -1.576201 | 0.485256 | 1.429246 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 279 | -1.132789 | -0.931481 | -0.350024 | -0.228575 | -1.201208 | -1.044342 | 0.532403 | 1.667036 | 1.383485 | -0.967474 | -0.286625 | -1.920618 | -0.797190 |
| 280 | -0.375948 | 0.058369 | 0.489068 | 0.862825 | -1.876102 | -0.195043 | -1.163295 | 0.716190 | 0.384576 | -0.168340 | 1.542126 | -0.769460 | 0.456686 |
| 281 | 0.412883 | -1.703432 | -0.514845 | -1.382818 | -0.713972 | -0.476089 | 1.471006 | 0.826485 | 0.508608 | -1.311788 | -2.010635 | -1.122699 | -0.848851 |
| 282 | -0.152329 | -2.012108 | -0.217355 | -1.122627 | -0.851075 | 0.634424 | 1.711007 | 0.281350 | -0.565156 | -1.667195 | -1.942452 | -1.586592 | -0.485128 |
| 283 | 0.348443 | -2.381428 | 1.267515 | -1.713290 | 0.161262 | -1.589515 | 1.383857 | -0.218429 | 0.412550 | 0.382171 | -1.073499 | -1.745128 | -3.227845 |
| 284 | -0.895866 | 1.001673 | 1.059356 | 0.166883 | -0.710729 | 0.466737 | -0.857566 | -0.158962 | 0.004241 | 0.391823 | 0.576231 | 0.329506 | -1.331272 |
| 285 | 0.417102 | 1.957515 | 2.350604 | -1.125042 | -2.206390 | -0.674814 | -1.217854 | 0.372865 | 0.840465 | -0.472910 | 0.310419 | 1.379494 | 1.128412 |
| 286 | -0.900897 | -0.289100 | 0.433265 | -0.281829 | -0.379951 | 1.272236 | 0.313949 | -0.261980 | -0.053111 | 0.473694 | 0.493962 | -0.263293 | -0.657598 |
| 287 | -0.002448 | -0.853612 | 0.441903 | 0.406478 | -0.823085 | 0.590185 | -0.292046 | -0.079952 | -0.422138 | 0.579522 | -0.620415 | -0.298847 | 0.620798 |
| 288 | -0.528092 | -1.022206 | -0.348679 | 0.093718 | -1.642833 | -2.355166 | -0.992806 | -0.143423 | 0.270521 | 0.838321 | 0.843686 | 0.469574 | -0.325121 |
| 289 | -0.387248 | -1.305014 | -0.365540 | 0.202745 | -0.906016 | -1.785190 | -1.377992 | -0.544742 | -0.670979 | -0.785606 | 0.505505 | 0.502505 | -0.151297 |
| 290 | 0.425324 | -2.583173 | -2.181080 | -1.262030 | -0.179265 | 0.176164 | 1.763096 | 0.436737 | -2.048534 | -1.014266 | 1.298221 | 0.401742 | -1.080608 |
| 291 | -0.572282 | -0.375532 | -2.067885 | -0.361247 | -0.315065 | -0.671820 | -0.183865 | -0.517694 | -0.802956 | -0.951809 | 0.282442 | 0.208005 | -0.271252 |
| 292 | -0.084382 | -1.508230 | -0.105496 | -1.930204 | -1.529664 | -0.795467 | 1.273717 | -1.858542 | -0.446361 | -0.239346 | 0.154464 | -0.114937 | -1.831603 |
| 293 | -1.172703 | 0.783209 | -1.141589 | -0.982768 | -0.513216 | 0.655437 | 1.962510 | 0.628858 | 1.130028 | 1.104741 | 1.539591 | 1.547843 | -0.011302 |
| 294 | -1.293038 | 0.838303 | -1.049071 | -0.708031 | -0.779995 | 0.868108 | 1.621994 | 0.725495 | 1.173585 | 1.424395 | 1.751950 | 1.352876 | 0.339922 |
| 295 | 0.809878 | -0.351504 | -2.231752 | -0.556719 | -1.430264 | -0.357918 | -0.727837 | 1.110363 | 1.684188 | 0.429768 | 0.560061 | 0.371789 | -1.110030 |
| 296 | 0.305449 | -0.148924 | -0.727054 | -0.126830 | 0.467272 | 0.420013 | 1.212777 | 0.954055 | -0.988419 | -0.423614 | -0.047239 | 0.058678 | -0.031517 |
| 297 | 1.212224 | 1.916789 | 0.287969 | -0.073842 | 0.289112 | 0.943764 | -0.395404 | -0.380613 | 0.262567 | 0.759137 | 0.277177 | 0.493951 | 1.026995 |
| 298 | -0.121307 | 0.217217 | 0.030920 | -0.201270 | -0.752001 | -0.276070 | 0.835502 | -0.363704 | -0.641199 | 0.283313 | 0.060013 | 0.013280 | 0.477857 |
| 299 | -0.622824 | -0.595352 | 0.256282 | -0.111551 | 0.023990 | 1.221659 | 1.572998 | -0.263983 | -0.707828 | 0.707801 | 0.306249 | 1.046476 | 0.214979 |
| 300 | -0.667480 | -0.808638 | 0.730781 | 0.054549 | 0.191421 | 0.279885 | 0.088177 | 0.823617 | 0.604299 | 0.640274 | -0.360151 | 1.298688 | 0.494875 |
| 301 | 0.928382 | -2.375767 | -0.427528 | -0.852350 | -1.137004 | 1.584181 | -1.700220 | -2.060965 | -1.326622 | 0.451948 | 0.593212 | 0.152418 | -0.128797 |
| 302 | -0.483888 | 0.443846 | 0.129714 | 0.199624 | -0.106985 | 0.817702 | -0.072817 | -1.163918 | 0.545762 | -0.141320 | 0.041767 | -0.402181 | 0.061897 |
| 303 | 0.715769 | 0.780533 | 1.467750 | -0.595580 | -1.178484 | 4.014345 | -0.112339 | -1.611382 | -0.295511 | 0.032462 | 1.836607 | -4.315898 | -1.084441 |
| 304 | 0.041466 | -0.470275 | 0.234655 | 0.109532 | -0.518455 | -0.977540 | -0.613498 | -1.108545 | 0.500653 | -0.214143 | -0.033265 | -0.541673 | 0.714974 |
| 305 | 0.818747 | 0.495675 | 1.005686 | 0.967334 | 0.505171 | -0.579478 | -0.847677 | 1.574323 | 1.544556 | 0.412556 | -0.972040 | 0.290457 | 0.289042 |
| 306 | 1.062928 | -1.149587 | 1.951840 | -0.065775 | 0.546680 | 0.994901 | -1.817826 | 2.109742 | 0.264443 | 0.505287 | -0.757462 | 0.578677 | 0.222503 |
| 307 | -0.701621 | -0.049803 | -0.719153 | -0.048069 | 1.223251 | 1.913492 | 0.887449 | 0.038186 | 0.546172 | -0.568362 | -1.091833 | -0.250367 | 0.831399 |
| 308 | -0.079821 | 0.796085 | -0.215763 | -1.396439 | -0.133350 | 0.582037 | 2.442796 | 0.743250 | -1.182753 | -0.723658 | -0.879934 | -2.498899 | -1.532262 |
309 rows × 13 columns
# Elbow method: record the within-cluster sum of squares (inertia)
# for k = 1..14 clusters on the MFCC features.
WSSs = []
for k in range(1, 15):
    km = KMeans(n_clusters=k, random_state=0).fit(X)
    WSSs.append(km.inertia_)
WSSs
[4016.9999999999995, 3599.8293806720085, 3349.349727264702, 3159.6428991584926, 3002.905895600155, 2909.3645052598604, 2804.695882234172, 2720.275460001156, 2622.695881163609, 2543.6022931320426, 2484.176525692807, 2436.6681239209124, 2402.363548718592, 2338.201438573343]
# Plot the elbow curve of the inertias computed above.
elbow_fig = plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
[<matplotlib.lines.Line2D at 0x1e82b4d8dd8>]
# Number of clusters chosen from the elbow plot above.
K = 2
# Use K in n_clusters instead of repeating the literal 2, so the choice is
# defined in exactly one place.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1,
0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,
0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,
1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,
0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0])
# Predict cluster membership for X. Since X is the same data the model was
# fitted on, this matches kmeans_mfcc.labels_ exactly (see output above).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1,
0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,
0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,
1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,
0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0])
# Attach the cluster assignment and the original target to the feature
# frame for the cluster-vs-chosen comparison below. list(y) discards y's
# index so the values are assigned positionally.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.784459 | 0.109342 | 0.681608 | 1.151904 | -0.707724 | 0.736365 | 0.241404 | -0.461605 | 1.084621 | 0.123497 | -0.150398 | 1.784532 | 0.824544 | 1 | 0 |
| 1 | 0.213444 | 0.453851 | 0.215705 | 0.987439 | -1.851748 | -0.001814 | -0.218203 | 0.548263 | -0.521851 | 1.253720 | 0.882518 | -0.892913 | 0.218184 | 1 | 0 |
| 2 | 1.478029 | 0.664210 | 0.484232 | 0.450284 | -0.427587 | -0.448079 | -0.195272 | -0.610679 | -0.675633 | 0.759098 | -0.107303 | -0.440790 | -1.398093 | 0 | 0 |
| 3 | -0.846386 | -0.742706 | -0.734786 | -0.199585 | -0.328948 | -0.230911 | 0.620910 | 0.911236 | 1.274407 | 0.150882 | -0.603865 | -0.919849 | 0.386069 | 0 | 0 |
| 4 | -0.952033 | -0.794706 | -1.141199 | -0.070979 | 0.454453 | 0.544600 | 1.427005 | 1.918539 | 1.204102 | 0.076961 | -0.328712 | -1.051776 | -0.151007 | 0 | 0 |
| 5 | -1.244473 | 2.250723 | 2.321007 | 0.549219 | 1.971653 | -1.427849 | 0.392314 | -1.015093 | 0.157566 | 0.486970 | -0.455070 | 0.229936 | 0.185742 | 1 | 0 |
| 6 | -0.128652 | 0.958605 | 1.191477 | 0.705974 | 0.793937 | -0.709130 | -0.221572 | 0.922219 | 0.500475 | -0.571099 | 0.521526 | 0.567361 | -0.486761 | 1 | 0 |
| 7 | -1.148662 | 0.562286 | 0.636059 | 0.222732 | 0.741470 | 0.009108 | -0.949365 | -0.486631 | -0.519627 | -0.739600 | -0.415884 | -0.026850 | 0.290685 | 1 | 0 |
| 8 | -0.440190 | 0.461355 | -0.016542 | -0.158636 | 0.985626 | -0.417310 | 0.182134 | 0.290631 | 0.379949 | 0.069159 | 1.080013 | 0.220566 | -0.042505 | 1 | 0 |
| 9 | -0.695730 | 0.398404 | 1.069978 | -0.124019 | 0.736597 | -0.912452 | 0.673156 | 0.604840 | 0.175505 | 0.496158 | 0.541788 | 0.649837 | -0.680054 | 1 | 0 |
| 10 | -0.006662 | 0.163832 | 1.373872 | -0.095120 | 1.621755 | 1.048509 | 0.997122 | 0.721763 | 0.660834 | -1.076324 | 0.925997 | -0.147393 | -0.420465 | 1 | 0 |
| 11 | -0.771763 | -0.484525 | -0.874411 | 0.647747 | -1.241650 | 0.190918 | 0.457290 | 0.915208 | 1.999689 | 1.879761 | 0.491598 | -0.164372 | -0.560754 | 1 | 0 |
| 12 | 0.140770 | 1.869847 | -1.926303 | -2.491201 | -2.679759 | -1.527330 | -0.299345 | -0.550878 | 0.702947 | 0.143961 | 0.034796 | -0.379551 | -0.422354 | 0 | 0 |
| 13 | -1.952477 | -0.949813 | 0.063314 | 1.188657 | 1.059601 | 1.221319 | 0.070346 | 2.284107 | 2.889527 | 2.012105 | 1.053494 | -0.178905 | -2.004333 | 1 | 0 |
| 14 | -0.895529 | 0.398850 | -0.469782 | 1.216393 | 0.657294 | -0.550619 | -0.854637 | -0.815454 | 1.929689 | 1.499328 | -0.096775 | -0.174183 | -1.119396 | 1 | 0 |
| 15 | -1.161372 | 1.475106 | 1.486594 | 0.127516 | 0.213940 | 0.587080 | -0.789652 | 0.130203 | 1.199389 | 1.458358 | 0.404206 | 0.754289 | -0.784214 | 1 | 0 |
| 16 | -0.476792 | 2.179287 | 0.101035 | -1.393755 | -0.740834 | 0.589666 | 0.873850 | 0.630539 | 0.535702 | 0.387326 | -0.979677 | 0.259755 | 0.313358 | 1 | 0 |
| 17 | -0.089088 | -0.841832 | 0.674093 | -0.842623 | 0.904577 | -1.476862 | 1.853427 | -1.108621 | 0.720923 | 0.383320 | -1.842030 | 1.712321 | -1.612726 | 0 | 0 |
| 18 | -1.772732 | 0.488101 | 0.057829 | 0.041074 | 0.732429 | 1.052187 | 0.279830 | -0.350521 | -0.476338 | -0.833438 | 0.184849 | -0.055428 | 0.627307 | 1 | 0 |
| 19 | -0.640351 | 0.068493 | 0.619966 | -0.599171 | 0.860806 | -0.385120 | 1.955087 | -1.014740 | 1.224043 | 1.450896 | -2.604448 | 2.187869 | -0.464774 | 1 | 0 |
| 20 | 0.590240 | 0.699904 | -0.097902 | 0.127319 | -0.882999 | 0.319144 | -0.146142 | -0.540616 | 0.300593 | 0.688863 | 0.314647 | 0.709538 | 0.572811 | 1 | 0 |
| 21 | 0.500240 | 0.875222 | -0.833826 | 0.377484 | 0.023480 | 1.321472 | 1.094037 | 0.734507 | 0.141947 | 0.214524 | 0.508556 | -0.265911 | -0.372316 | 1 | 0 |
| 22 | -0.076653 | 0.518030 | 0.003390 | 0.452969 | -0.218736 | 0.115409 | 0.332618 | 0.611098 | 0.211893 | -0.206368 | 0.358363 | 0.614915 | 0.518172 | 1 | 0 |
| 23 | 0.010763 | -0.352873 | -0.460051 | 0.423968 | -0.228393 | -0.040296 | -0.740869 | -0.810034 | -1.379366 | -0.179024 | 0.147810 | -0.224826 | 0.615011 | 1 | 0 |
| 24 | 0.874600 | 0.173728 | -1.041125 | 0.845285 | 1.139221 | 0.264458 | -0.378878 | 0.430226 | -0.568469 | -1.237333 | 0.032074 | 0.812111 | 0.431460 | 1 | 0 |
| 25 | 0.200637 | 0.337376 | 0.022126 | 1.189135 | -0.210135 | -1.195492 | 0.067874 | 1.349711 | -0.534365 | -0.132754 | 0.055132 | 0.239009 | -0.275633 | 1 | 0 |
| 26 | 0.362627 | 0.159292 | -1.211688 | -0.555502 | 0.107540 | 0.797027 | -0.246321 | -1.113565 | -1.373054 | -2.369077 | -0.539483 | 1.032005 | 1.637730 | 1 | 0 |
| 27 | -0.504648 | -0.561515 | -2.173809 | -1.525691 | -0.810132 | -0.617474 | 0.441103 | 1.146056 | 1.464488 | -1.111032 | -0.742722 | 0.034623 | 0.200147 | 0 | 0 |
| 28 | -0.339646 | -2.140319 | -1.409226 | -0.207553 | -1.216547 | -1.135346 | -0.831817 | 1.136334 | -0.187159 | 1.388841 | 0.282573 | -0.807850 | -0.371992 | 0 | 0 |
| 29 | -1.279089 | 1.555887 | 0.890503 | 2.134195 | 0.337580 | -0.037382 | -2.046955 | -2.888113 | 1.329665 | 1.436687 | -1.576201 | 0.485256 | 1.429246 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 279 | -1.132789 | -0.931481 | -0.350024 | -0.228575 | -1.201208 | -1.044342 | 0.532403 | 1.667036 | 1.383485 | -0.967474 | -0.286625 | -1.920618 | -0.797190 | 0 | 1 |
| 280 | -0.375948 | 0.058369 | 0.489068 | 0.862825 | -1.876102 | -0.195043 | -1.163295 | 0.716190 | 0.384576 | -0.168340 | 1.542126 | -0.769460 | 0.456686 | 1 | 1 |
| 281 | 0.412883 | -1.703432 | -0.514845 | -1.382818 | -0.713972 | -0.476089 | 1.471006 | 0.826485 | 0.508608 | -1.311788 | -2.010635 | -1.122699 | -0.848851 | 0 | 1 |
| 282 | -0.152329 | -2.012108 | -0.217355 | -1.122627 | -0.851075 | 0.634424 | 1.711007 | 0.281350 | -0.565156 | -1.667195 | -1.942452 | -1.586592 | -0.485128 | 0 | 1 |
| 283 | 0.348443 | -2.381428 | 1.267515 | -1.713290 | 0.161262 | -1.589515 | 1.383857 | -0.218429 | 0.412550 | 0.382171 | -1.073499 | -1.745128 | -3.227845 | 0 | 1 |
| 284 | -0.895866 | 1.001673 | 1.059356 | 0.166883 | -0.710729 | 0.466737 | -0.857566 | -0.158962 | 0.004241 | 0.391823 | 0.576231 | 0.329506 | -1.331272 | 1 | 1 |
| 285 | 0.417102 | 1.957515 | 2.350604 | -1.125042 | -2.206390 | -0.674814 | -1.217854 | 0.372865 | 0.840465 | -0.472910 | 0.310419 | 1.379494 | 1.128412 | 1 | 1 |
| 286 | -0.900897 | -0.289100 | 0.433265 | -0.281829 | -0.379951 | 1.272236 | 0.313949 | -0.261980 | -0.053111 | 0.473694 | 0.493962 | -0.263293 | -0.657598 | 1 | 1 |
| 287 | -0.002448 | -0.853612 | 0.441903 | 0.406478 | -0.823085 | 0.590185 | -0.292046 | -0.079952 | -0.422138 | 0.579522 | -0.620415 | -0.298847 | 0.620798 | 1 | 1 |
| 288 | -0.528092 | -1.022206 | -0.348679 | 0.093718 | -1.642833 | -2.355166 | -0.992806 | -0.143423 | 0.270521 | 0.838321 | 0.843686 | 0.469574 | -0.325121 | 0 | 1 |
| 289 | -0.387248 | -1.305014 | -0.365540 | 0.202745 | -0.906016 | -1.785190 | -1.377992 | -0.544742 | -0.670979 | -0.785606 | 0.505505 | 0.502505 | -0.151297 | 0 | 1 |
| 290 | 0.425324 | -2.583173 | -2.181080 | -1.262030 | -0.179265 | 0.176164 | 1.763096 | 0.436737 | -2.048534 | -1.014266 | 1.298221 | 0.401742 | -1.080608 | 0 | 1 |
| 291 | -0.572282 | -0.375532 | -2.067885 | -0.361247 | -0.315065 | -0.671820 | -0.183865 | -0.517694 | -0.802956 | -0.951809 | 0.282442 | 0.208005 | -0.271252 | 0 | 1 |
| 292 | -0.084382 | -1.508230 | -0.105496 | -1.930204 | -1.529664 | -0.795467 | 1.273717 | -1.858542 | -0.446361 | -0.239346 | 0.154464 | -0.114937 | -1.831603 | 0 | 1 |
| 293 | -1.172703 | 0.783209 | -1.141589 | -0.982768 | -0.513216 | 0.655437 | 1.962510 | 0.628858 | 1.130028 | 1.104741 | 1.539591 | 1.547843 | -0.011302 | 1 | 1 |
| 294 | -1.293038 | 0.838303 | -1.049071 | -0.708031 | -0.779995 | 0.868108 | 1.621994 | 0.725495 | 1.173585 | 1.424395 | 1.751950 | 1.352876 | 0.339922 | 1 | 1 |
| 295 | 0.809878 | -0.351504 | -2.231752 | -0.556719 | -1.430264 | -0.357918 | -0.727837 | 1.110363 | 1.684188 | 0.429768 | 0.560061 | 0.371789 | -1.110030 | 0 | 1 |
| 296 | 0.305449 | -0.148924 | -0.727054 | -0.126830 | 0.467272 | 0.420013 | 1.212777 | 0.954055 | -0.988419 | -0.423614 | -0.047239 | 0.058678 | -0.031517 | 0 | 1 |
| 297 | 1.212224 | 1.916789 | 0.287969 | -0.073842 | 0.289112 | 0.943764 | -0.395404 | -0.380613 | 0.262567 | 0.759137 | 0.277177 | 0.493951 | 1.026995 | 1 | 1 |
| 298 | -0.121307 | 0.217217 | 0.030920 | -0.201270 | -0.752001 | -0.276070 | 0.835502 | -0.363704 | -0.641199 | 0.283313 | 0.060013 | 0.013280 | 0.477857 | 1 | 1 |
| 299 | -0.622824 | -0.595352 | 0.256282 | -0.111551 | 0.023990 | 1.221659 | 1.572998 | -0.263983 | -0.707828 | 0.707801 | 0.306249 | 1.046476 | 0.214979 | 1 | 1 |
| 300 | -0.667480 | -0.808638 | 0.730781 | 0.054549 | 0.191421 | 0.279885 | 0.088177 | 0.823617 | 0.604299 | 0.640274 | -0.360151 | 1.298688 | 0.494875 | 1 | 1 |
| 301 | 0.928382 | -2.375767 | -0.427528 | -0.852350 | -1.137004 | 1.584181 | -1.700220 | -2.060965 | -1.326622 | 0.451948 | 0.593212 | 0.152418 | -0.128797 | 0 | 1 |
| 302 | -0.483888 | 0.443846 | 0.129714 | 0.199624 | -0.106985 | 0.817702 | -0.072817 | -1.163918 | 0.545762 | -0.141320 | 0.041767 | -0.402181 | 0.061897 | 1 | 1 |
| 303 | 0.715769 | 0.780533 | 1.467750 | -0.595580 | -1.178484 | 4.014345 | -0.112339 | -1.611382 | -0.295511 | 0.032462 | 1.836607 | -4.315898 | -1.084441 | 1 | 1 |
| 304 | 0.041466 | -0.470275 | 0.234655 | 0.109532 | -0.518455 | -0.977540 | -0.613498 | -1.108545 | 0.500653 | -0.214143 | -0.033265 | -0.541673 | 0.714974 | 1 | 1 |
| 305 | 0.818747 | 0.495675 | 1.005686 | 0.967334 | 0.505171 | -0.579478 | -0.847677 | 1.574323 | 1.544556 | 0.412556 | -0.972040 | 0.290457 | 0.289042 | 1 | 1 |
| 306 | 1.062928 | -1.149587 | 1.951840 | -0.065775 | 0.546680 | 0.994901 | -1.817826 | 2.109742 | 0.264443 | 0.505287 | -0.757462 | 0.578677 | 0.222503 | 1 | 1 |
| 307 | -0.701621 | -0.049803 | -0.719153 | -0.048069 | 1.223251 | 1.913492 | 0.887449 | 0.038186 | 0.546172 | -0.568362 | -1.091833 | -0.250367 | 0.831399 | 1 | 1 |
| 308 | -0.079821 | 0.796085 | -0.215763 | -1.396439 | -0.133350 | 0.582037 | 2.442796 | 0.743250 | -1.182753 | -0.723658 | -0.879934 | -2.498899 | -1.532262 | 0 | 1 |
309 rows × 15 columns
# Count rows per (chosen, Cluster) pair, then pivot so each cluster becomes
# one stacked bar split by the 'chosen' label. Column 0 of the reset
# groupby-size frame holds the counts, hence values=0 below.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82b505400>
# Render the company name as a Markdown heading in the notebook output.
# NOTE(review): Latex is imported but not used in this cell.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[3]))
# Rebuild the feature matrix and target for playlist index 3, dropping the
# previously added 'Cluster' column from the standardized MFCC frame.
X = df_n_ps_std_mfcc[3].drop(columns='Cluster')
y = df_n_ps[3]['chosen']
# NOTE(review): no random_state here, so the split (and every result
# derived from it below) changes on each run — consider fixing a seed
# for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(139, 13)
# Base estimator; its hyperparameters are overridden by the grid search below.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate values for the MLP hyperparameter grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but deliberately excluded from the grid below (see the
# commented-out 'batch_size' entry in `parametros`).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time()  # Current time in seconds since Jan 1, 1970 (the epoch) — used to time the search
np.random.seed(1234)
# Hyperparameter grid; batch_size is deliberately left commented out to
# keep the search tractable.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
             }
# Track both Cohen's kappa and accuracy; refit/best model is chosen by accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` parameter was deprecated in scikit-learn 0.22 and
# removed in 0.24 — this call only runs on older scikit-learn versions.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # Time after the model search finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30, 20, 10), 'learning_rate_init': 0.003, 'max_iter': 75}, que permiten obtener un Accuracy de 86.33% y un Kappa del 66.54
Tiempo total: 21.38 minutos
# Input width: one unit per MFCC feature column.
n0 = X_train.shape[1]

### hidden_layer_sizes
# Rebuild the tuned sklearn architecture in Keras: the best hidden layer
# sizes plus a single output unit.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']

# Use the activation the grid search actually selected instead of
# hard-coding 'tanh' (the original rebuilt every hidden layer with tanh even
# though the best sklearn model used 'relu' — see the grid output above).
# sklearn's 'logistic' is called 'sigmoid' in Keras; 'relu'/'tanh' match.
_best_act = grid.best_params_['activation']
_keras_act = 'sigmoid' if _best_act == 'logistic' else _best_act

input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation=_keras_act)(hidden_outputs[i]))
# Single sigmoid output unit for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly-initialized weights so training can later restart
# from the same state (restored with set_weights before compile/fit).
weights = model.get_weights()
model.summary()
Model: "model_4" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_4 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_10 (Dense) (None, 30) 420 _________________________________________________________________ dense_11 (Dense) (None, 20) 620 _________________________________________________________________ dense_12 (Dense) (None, 10) 210 _________________________________________________________________ dense_13 (Dense) (None, 1) 11 ================================================================= Total params: 1,261 Trainable params: 1,261 Non-trainable params: 0 _________________________________________________________________
# Restore the initial weight snapshot so repeated runs of this cell train
# from the same starting point.
model.set_weights(weights)
# NOTE(review): `lr` is the legacy Keras kwarg; newer versions use `learning_rate`.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train for the tuned number of epochs; ReduceLROnPlateau halves the learning
# rate whenever val_accuracy fails to improve by >= 0.01 for 10 epochs.
# NOTE(review): the test split doubles as validation data here, so the LR
# schedule is driven by the test set — consider a separate validation split.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                   )
Train on 139 samples, validate on 47 samples Epoch 1/75 139/139 [==============================] - 0s 2ms/step - loss: 0.7120 - accuracy: 0.4604 - val_loss: 0.6643 - val_accuracy: 0.6383 Epoch 2/75 139/139 [==============================] - 0s 101us/step - loss: 0.5930 - accuracy: 0.7410 - val_loss: 0.6515 - val_accuracy: 0.6809 Epoch 3/75 139/139 [==============================] - 0s 93us/step - loss: 0.5191 - accuracy: 0.7626 - val_loss: 0.6451 - val_accuracy: 0.7234 Epoch 4/75 139/139 [==============================] - 0s 86us/step - loss: 0.4626 - accuracy: 0.7842 - val_loss: 0.6396 - val_accuracy: 0.7872 Epoch 5/75 139/139 [==============================] - 0s 86us/step - loss: 0.4188 - accuracy: 0.8201 - val_loss: 0.6337 - val_accuracy: 0.7660 Epoch 6/75 139/139 [==============================] - 0s 93us/step - loss: 0.3836 - accuracy: 0.8489 - val_loss: 0.6403 - val_accuracy: 0.7660 Epoch 7/75 139/139 [==============================] - 0s 101us/step - loss: 0.3544 - accuracy: 0.8705 - val_loss: 0.6423 - val_accuracy: 0.7660 Epoch 8/75 139/139 [==============================] - 0s 101us/step - loss: 0.3328 - accuracy: 0.8705 - val_loss: 0.6390 - val_accuracy: 0.7660 Epoch 9/75 139/139 [==============================] - 0s 101us/step - loss: 0.3117 - accuracy: 0.8777 - val_loss: 0.6345 - val_accuracy: 0.7660 Epoch 10/75 139/139 [==============================] - 0s 93us/step - loss: 0.3017 - accuracy: 0.8705 - val_loss: 0.6233 - val_accuracy: 0.7660 Epoch 11/75 139/139 [==============================] - 0s 129us/step - loss: 0.2924 - accuracy: 0.8849 - val_loss: 0.6335 - val_accuracy: 0.7872 Epoch 12/75 139/139 [==============================] - 0s 93us/step - loss: 0.2825 - accuracy: 0.8777 - val_loss: 0.6348 - val_accuracy: 0.7872 Epoch 13/75 139/139 [==============================] - 0s 86us/step - loss: 0.2727 - accuracy: 0.8777 - val_loss: 0.6450 - val_accuracy: 0.7872 Epoch 14/75 139/139 [==============================] - 0s 93us/step - loss: 0.2655 - 
accuracy: 0.8993 - val_loss: 0.6622 - val_accuracy: 0.7872 Epoch 00014: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. Epoch 15/75 139/139 [==============================] - 0s 93us/step - loss: 0.2583 - accuracy: 0.9065 - val_loss: 0.6709 - val_accuracy: 0.7872 Epoch 16/75 139/139 [==============================] - 0s 86us/step - loss: 0.2544 - accuracy: 0.9065 - val_loss: 0.6659 - val_accuracy: 0.7872 Epoch 17/75 139/139 [==============================] - 0s 101us/step - loss: 0.2510 - accuracy: 0.9065 - val_loss: 0.6681 - val_accuracy: 0.7660 Epoch 18/75 139/139 [==============================] - 0s 93us/step - loss: 0.2473 - accuracy: 0.9137 - val_loss: 0.6822 - val_accuracy: 0.7660 Epoch 19/75 139/139 [==============================] - 0s 86us/step - loss: 0.2434 - accuracy: 0.9137 - val_loss: 0.6909 - val_accuracy: 0.7660 Epoch 20/75 139/139 [==============================] - 0s 79us/step - loss: 0.2402 - accuracy: 0.9137 - val_loss: 0.6924 - val_accuracy: 0.7660 Epoch 21/75 139/139 [==============================] - 0s 137us/step - loss: 0.2373 - accuracy: 0.9137 - val_loss: 0.6904 - val_accuracy: 0.7660 Epoch 22/75 139/139 [==============================] - 0s 101us/step - loss: 0.2347 - accuracy: 0.9137 - val_loss: 0.6874 - val_accuracy: 0.7660 Epoch 23/75 139/139 [==============================] - 0s 93us/step - loss: 0.2313 - accuracy: 0.9137 - val_loss: 0.6854 - val_accuracy: 0.7660 Epoch 24/75 139/139 [==============================] - 0s 86us/step - loss: 0.2278 - accuracy: 0.9137 - val_loss: 0.6835 - val_accuracy: 0.7660 Epoch 00024: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 25/75 139/139 [==============================] - 0s 86us/step - loss: 0.2249 - accuracy: 0.9137 - val_loss: 0.6845 - val_accuracy: 0.7660 Epoch 26/75 139/139 [==============================] - 0s 101us/step - loss: 0.2242 - accuracy: 0.9137 - val_loss: 0.6798 - val_accuracy: 0.7660 Epoch 27/75 139/139 [==============================] - 0s 93us/step - loss: 0.2229 - accuracy: 0.9137 - val_loss: 0.6763 - val_accuracy: 0.7660 Epoch 28/75 139/139 [==============================] - 0s 101us/step - loss: 0.2213 - accuracy: 0.9137 - val_loss: 0.6758 - val_accuracy: 0.7660 Epoch 29/75 139/139 [==============================] - 0s 86us/step - loss: 0.2198 - accuracy: 0.9137 - val_loss: 0.6758 - val_accuracy: 0.7660 Epoch 30/75 139/139 [==============================] - 0s 86us/step - loss: 0.2179 - accuracy: 0.9209 - val_loss: 0.6753 - val_accuracy: 0.7660 Epoch 31/75 139/139 [==============================] - 0s 79us/step - loss: 0.2163 - accuracy: 0.9209 - val_loss: 0.6761 - val_accuracy: 0.7660 Epoch 32/75 139/139 [==============================] - 0s 86us/step - loss: 0.2150 - accuracy: 0.9209 - val_loss: 0.6781 - val_accuracy: 0.7660 Epoch 33/75 139/139 [==============================] - 0s 122us/step - loss: 0.2136 - accuracy: 0.9281 - val_loss: 0.6806 - val_accuracy: 0.7660 Epoch 34/75 139/139 [==============================] - 0s 158us/step - loss: 0.2121 - accuracy: 0.9281 - val_loss: 0.6834 - val_accuracy: 0.7660 Epoch 00034: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 35/75 139/139 [==============================] - 0s 129us/step - loss: 0.2106 - accuracy: 0.9281 - val_loss: 0.6837 - val_accuracy: 0.7660 Epoch 36/75 139/139 [==============================] - 0s 108us/step - loss: 0.2099 - accuracy: 0.9281 - val_loss: 0.6845 - val_accuracy: 0.7660 Epoch 37/75 139/139 [==============================] - 0s 108us/step - loss: 0.2092 - accuracy: 0.9281 - val_loss: 0.6846 - val_accuracy: 0.7660 Epoch 38/75 139/139 [==============================] - 0s 93us/step - loss: 0.2085 - accuracy: 0.9281 - val_loss: 0.6834 - val_accuracy: 0.7660 Epoch 39/75 139/139 [==============================] - 0s 93us/step - loss: 0.2078 - accuracy: 0.9281 - val_loss: 0.6844 - val_accuracy: 0.7660 Epoch 40/75 139/139 [==============================] - 0s 93us/step - loss: 0.2070 - accuracy: 0.9281 - val_loss: 0.6853 - val_accuracy: 0.7660 Epoch 41/75 139/139 [==============================] - 0s 93us/step - loss: 0.2064 - accuracy: 0.9281 - val_loss: 0.6865 - val_accuracy: 0.7660 Epoch 42/75 139/139 [==============================] - 0s 93us/step - loss: 0.2055 - accuracy: 0.9281 - val_loss: 0.6884 - val_accuracy: 0.7660 Epoch 43/75 139/139 [==============================] - 0s 93us/step - loss: 0.2050 - accuracy: 0.9281 - val_loss: 0.6905 - val_accuracy: 0.7660 Epoch 44/75 139/139 [==============================] - 0s 93us/step - loss: 0.2046 - accuracy: 0.9281 - val_loss: 0.6924 - val_accuracy: 0.7660 Epoch 00044: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 45/75 139/139 [==============================] - 0s 129us/step - loss: 0.2036 - accuracy: 0.9281 - val_loss: 0.6908 - val_accuracy: 0.7660 Epoch 46/75 139/139 [==============================] - 0s 108us/step - loss: 0.2031 - accuracy: 0.9281 - val_loss: 0.6889 - val_accuracy: 0.7660 Epoch 47/75 139/139 [==============================] - 0s 86us/step - loss: 0.2027 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 48/75 139/139 [==============================] - 0s 72us/step - loss: 0.2024 - accuracy: 0.9281 - val_loss: 0.6861 - val_accuracy: 0.7660 Epoch 49/75 139/139 [==============================] - 0s 93us/step - loss: 0.2021 - accuracy: 0.9281 - val_loss: 0.6861 - val_accuracy: 0.7660 Epoch 50/75 139/139 [==============================] - 0s 79us/step - loss: 0.2016 - accuracy: 0.9281 - val_loss: 0.6864 - val_accuracy: 0.7660 Epoch 51/75 139/139 [==============================] - 0s 86us/step - loss: 0.2013 - accuracy: 0.9281 - val_loss: 0.6864 - val_accuracy: 0.7660 Epoch 52/75 139/139 [==============================] - 0s 79us/step - loss: 0.2010 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 53/75 139/139 [==============================] - 0s 79us/step - loss: 0.2006 - accuracy: 0.9281 - val_loss: 0.6876 - val_accuracy: 0.7660 Epoch 54/75 139/139 [==============================] - 0s 72us/step - loss: 0.2003 - accuracy: 0.9281 - val_loss: 0.6873 - val_accuracy: 0.7660 Epoch 00054: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 55/75 139/139 [==============================] - 0s 72us/step - loss: 0.1998 - accuracy: 0.9281 - val_loss: 0.6875 - val_accuracy: 0.7660 Epoch 56/75 139/139 [==============================] - 0s 122us/step - loss: 0.1997 - accuracy: 0.9281 - val_loss: 0.6872 - val_accuracy: 0.7660 Epoch 57/75 139/139 [==============================] - 0s 86us/step - loss: 0.1995 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 58/75 139/139 [==============================] - 0s 79us/step - loss: 0.1993 - accuracy: 0.9281 - val_loss: 0.6874 - val_accuracy: 0.7660 Epoch 59/75 139/139 [==============================] - 0s 79us/step - loss: 0.1991 - accuracy: 0.9281 - val_loss: 0.6880 - val_accuracy: 0.7660 Epoch 60/75 139/139 [==============================] - 0s 72us/step - loss: 0.1990 - accuracy: 0.9281 - val_loss: 0.6874 - val_accuracy: 0.7660 Epoch 61/75 139/139 [==============================] - 0s 79us/step - loss: 0.1987 - accuracy: 0.9281 - val_loss: 0.6869 - val_accuracy: 0.7660 Epoch 62/75 139/139 [==============================] - 0s 79us/step - loss: 0.1986 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 63/75 139/139 [==============================] - 0s 72us/step - loss: 0.1983 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 64/75 139/139 [==============================] - 0s 79us/step - loss: 0.1981 - accuracy: 0.9281 - val_loss: 0.6868 - val_accuracy: 0.7660 Epoch 00064: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 65/75 139/139 [==============================] - 0s 79us/step - loss: 0.1980 - accuracy: 0.9281 - val_loss: 0.6868 - val_accuracy: 0.7660 Epoch 66/75 139/139 [==============================] - 0s 79us/step - loss: 0.1979 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 67/75 139/139 [==============================] - 0s 79us/step - loss: 0.1978 - accuracy: 0.9281 - val_loss: 0.6866 - val_accuracy: 0.7660 Epoch 68/75 139/139 [==============================] - 0s 101us/step - loss: 0.1977 - accuracy: 0.9281 - val_loss: 0.6866 - val_accuracy: 0.7660 Epoch 69/75 139/139 [==============================] - 0s 122us/step - loss: 0.1977 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 70/75 139/139 [==============================] - 0s 86us/step - loss: 0.1975 - accuracy: 0.9281 - val_loss: 0.6870 - val_accuracy: 0.7660 Epoch 71/75 139/139 [==============================] - 0s 86us/step - loss: 0.1974 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 72/75 139/139 [==============================] - 0s 72us/step - loss: 0.1973 - accuracy: 0.9281 - val_loss: 0.6873 - val_accuracy: 0.7660 Epoch 73/75 139/139 [==============================] - 0s 79us/step - loss: 0.1972 - accuracy: 0.9281 - val_loss: 0.6872 - val_accuracy: 0.7660 Epoch 74/75 139/139 [==============================] - 0s 79us/step - loss: 0.1972 - accuracy: 0.9281 - val_loss: 0.6873 - val_accuracy: 0.7660 Epoch 00074: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. Epoch 75/75 139/139 [==============================] - 0s 93us/step - loss: 0.1971 - accuracy: 0.9281 - val_loss: 0.6875 - val_accuracy: 0.7660
# Plot the Keras training history: accuracy curves first, then loss curves,
# one figure per metric (training as blue dots, validation as a blue line).
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
# One x-value per recorded epoch.
epochs = range(len(acc))
print(epochs)
# Same layout for both metrics, rendered in the same order as before.
for train_series, val_series, short, long in (
        (acc, val_acc, 'acc', 'accuracy'),
        (loss, val_loss, 'loss', 'loss')):
    plt.plot(epochs, train_series, 'bo', label='Training ' + short)
    plt.plot(epochs, val_series, 'b', label='Validation ' + short)
    plt.title('Training and validation ' + long)
    plt.legend()
    plt.show()
range(0, 75)
# Score the trained network on the held-out test split and report both values.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
47/47 [==============================] - 0s 106us/step test loss: 0.687487561017909, test accuracy: 0.7659574747085571
# Raw sigmoid scores for the test set; ROC AUC consumes scores directly,
# no thresholding needed here.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.7903225806451613
# Binarize the predicted probabilities at the usual 0.5 threshold, then
# compute Cohen's Kappa against the true labels.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.396732788798133
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.311006 | 1.696486 | 0.912001 | -0.211934 | -0.513557 | 1.357699 | 0.777385 | 0.508585 | -2.290902 | -2.422249 | -0.738438 | -2.221933 | -1.191363 |
| 1 | 0.947147 | -0.576741 | -1.258913 | -0.786859 | 0.887961 | -1.895175 | -0.310427 | -0.374360 | 1.478062 | 0.972075 | -1.105984 | 0.612318 | -1.486887 |
| 2 | -0.115048 | 1.257050 | 0.043002 | -2.677464 | 3.902183 | -1.091787 | 0.505797 | 2.341684 | -2.345224 | -1.678088 | -2.608854 | -2.617777 | -2.135652 |
| 3 | 0.621032 | 1.417449 | 1.399722 | -0.625673 | 1.012110 | 0.230671 | -0.287988 | 1.012771 | -2.250326 | -0.340971 | -0.353905 | -0.717440 | -0.390485 |
| 4 | 0.340978 | 1.662814 | -1.775422 | 0.156552 | 1.678811 | 0.301711 | 2.038462 | 1.511985 | 1.508787 | -2.046602 | 0.741073 | -0.282747 | -0.779814 |
| 5 | 0.426765 | -1.056701 | -1.244088 | -0.696846 | -0.372415 | -0.847420 | -0.209607 | 1.462924 | -0.541420 | 0.000628 | -1.135148 | 1.608546 | 1.709532 |
| 6 | 0.210857 | -1.779497 | -2.206121 | -0.832640 | 0.636169 | -1.979858 | -0.510102 | 1.437770 | 0.128209 | 0.025521 | 0.184211 | 2.300204 | 0.912793 |
| 7 | -0.821293 | -0.049796 | 0.237440 | 0.379918 | 0.714133 | 0.670070 | 0.122605 | -0.069298 | -0.126759 | -0.199559 | 0.547891 | -0.099623 | -0.024895 |
| 8 | 0.420103 | -0.662020 | -0.550543 | -0.566406 | -0.923203 | -0.295152 | -0.533234 | 0.927026 | 0.119135 | 0.218761 | -0.245778 | 0.627242 | 1.313952 |
| 9 | -1.436247 | 0.435343 | 2.482690 | 1.099668 | -0.392845 | 0.565039 | 0.569531 | -0.088218 | -0.131137 | -0.699769 | -0.538549 | -0.329443 | 0.942919 |
| 10 | -1.574051 | -1.334372 | -1.636184 | 1.768991 | -0.369456 | -0.008046 | -1.402331 | 0.012625 | 1.135935 | 1.623145 | -0.653935 | 0.182348 | 1.052310 |
| 11 | -1.798986 | -1.632467 | -1.314854 | 2.656006 | -0.096678 | -0.174852 | -1.748372 | 0.185804 | 0.930317 | 0.365776 | -0.676448 | 0.358271 | 1.523770 |
| 12 | -0.708207 | 0.931180 | 0.258840 | -0.189291 | -0.204832 | -0.103872 | 0.221697 | -0.231695 | -0.003439 | 0.423528 | 1.259835 | 0.119625 | -0.192417 |
| 13 | -2.007033 | -0.288096 | 0.099713 | 0.390909 | 1.333138 | -0.069950 | 0.643074 | 0.172080 | -0.109666 | 0.304475 | -1.157528 | -1.708326 | -1.420079 |
| 14 | -0.497985 | 0.020592 | -0.123619 | 0.165046 | -0.765078 | -0.465219 | 0.172533 | 0.722853 | 0.284863 | -0.035284 | 0.024769 | -0.065990 | -0.992437 |
| 15 | 1.200625 | 0.984580 | -0.234312 | 0.348855 | 0.175663 | 0.309396 | 0.390611 | -0.745912 | -0.667554 | -0.052439 | 0.119610 | -0.862930 | 0.945979 |
| 16 | 0.435253 | 3.280178 | 0.407736 | 1.143148 | 2.291571 | 0.546530 | 0.170667 | 0.427708 | -0.063936 | -0.532360 | 0.404150 | 0.415849 | 0.869331 |
| 17 | -0.398944 | 0.035026 | -1.634042 | -1.354378 | 0.854385 | 1.406182 | -0.773335 | 0.663902 | 0.928496 | 1.278830 | 0.464511 | 0.235475 | -0.040374 |
| 18 | -0.454008 | -0.234096 | -0.930672 | -0.507506 | 0.545773 | 0.437756 | 1.026910 | 0.013959 | -0.620099 | -0.593763 | 1.073690 | 0.594340 | 0.987056 |
| 19 | 0.149846 | 0.062252 | -0.002122 | 0.786346 | 0.810930 | 0.304880 | -0.882886 | -0.043156 | 2.503584 | 0.894947 | 0.394981 | 0.761651 | 0.402963 |
| 20 | -0.314274 | 0.446482 | 0.889744 | 0.891114 | 1.249237 | 0.718469 | 0.296834 | -0.831548 | -0.393364 | -0.103574 | 0.295790 | 0.092061 | 0.424633 |
| 21 | 0.659365 | 1.053258 | -0.877939 | -0.295954 | -1.122110 | -0.035202 | 1.512616 | 0.031457 | -0.700740 | -1.687204 | -1.136215 | -1.545451 | -0.082548 |
| 22 | 0.568507 | -0.357318 | -1.183577 | -0.069205 | 0.462644 | -0.956011 | 0.501504 | 0.240708 | -0.025482 | 0.416003 | 0.237690 | -0.566935 | -0.846151 |
| 23 | 0.696474 | 0.477607 | -1.637469 | -1.158983 | -2.224208 | -1.861929 | -0.176558 | 0.694585 | 0.426826 | -0.088376 | -0.335290 | 1.125320 | 0.705700 |
| 24 | -0.221795 | -0.513464 | -0.506448 | 0.594506 | 0.033232 | -1.141879 | -1.582503 | -0.081204 | -0.001962 | -0.704687 | -0.473528 | 0.580117 | 1.533686 |
| 25 | 0.036099 | -0.007586 | 0.116729 | 0.438081 | -1.526141 | -1.994283 | -1.014100 | 0.028630 | -0.553238 | -0.540795 | 0.467730 | 0.943285 | 0.498193 |
| 26 | -0.291576 | -0.372192 | -1.176599 | 0.078535 | 0.516288 | -1.851892 | -2.218803 | 0.335200 | 0.323222 | 0.006649 | 0.017717 | 0.133172 | 1.208725 |
| 27 | 0.953536 | 0.427304 | -0.554063 | 0.425439 | 1.368674 | 0.362392 | 0.477030 | -0.976616 | -0.382390 | 0.310619 | -0.903078 | -0.943886 | -0.047616 |
| 28 | -1.172014 | 1.307258 | -1.059323 | -0.655908 | 1.591107 | 0.483432 | 0.474862 | 0.348014 | -0.527448 | 0.798802 | -0.075253 | 1.943808 | 0.108268 |
| 29 | -0.954427 | 0.000731 | -0.367958 | 0.281024 | 0.303337 | 0.744504 | 1.271647 | 0.298340 | -0.057042 | -0.297712 | -0.053703 | -0.045043 | -0.561554 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 156 | 0.232363 | -1.167339 | -0.114632 | 1.240724 | -0.209611 | 0.597503 | -0.105216 | -0.393018 | -0.168804 | -0.038601 | 0.602075 | -0.482733 | 0.192333 |
| 157 | -1.686193 | -0.806140 | -0.531342 | -0.411912 | 0.312945 | 0.751058 | 0.624837 | -0.394463 | 0.549120 | -1.174079 | -1.374572 | -1.950144 | -0.652535 |
| 158 | 0.487798 | 1.116042 | -0.308817 | 0.175231 | -0.191701 | -0.682970 | 0.502123 | 0.749073 | 1.365476 | 0.198244 | 1.283992 | 0.132188 | 0.482532 |
| 159 | 1.049575 | 0.742765 | 0.000505 | 0.670386 | 0.235663 | -0.297404 | 0.891743 | 0.047729 | 0.086633 | 0.873400 | 0.552393 | 0.496793 | 0.659122 |
| 160 | 0.285967 | 0.602916 | -0.009050 | 0.802464 | 0.333031 | -1.182611 | 0.473870 | 0.896236 | 0.890391 | 0.208214 | 0.786475 | 0.044481 | -0.114927 |
| 161 | 2.568510 | -0.180837 | 0.794882 | 1.410838 | 0.898076 | 0.468184 | 0.963255 | 0.338074 | 2.081580 | 2.353196 | 0.146660 | -0.295606 | -0.020484 |
| 162 | 0.821849 | 0.906757 | 0.282262 | 0.304716 | -0.691824 | 0.772704 | 2.543328 | -0.404440 | 1.861464 | 1.635426 | 0.204673 | 0.084333 | 0.469447 |
| 163 | 2.581037 | 0.239015 | 1.212048 | 0.498566 | 0.095720 | 0.062469 | 3.463238 | 0.374969 | -0.054235 | -0.365031 | -0.169020 | 1.160964 | 0.666076 |
| 164 | -0.247271 | -0.874145 | -0.840584 | 0.233138 | 0.034101 | 0.259892 | 0.144353 | -0.570094 | 1.244117 | 0.282845 | 0.127444 | -0.721587 | -1.450860 |
| 165 | 0.188979 | -0.519200 | 0.108496 | -0.513645 | -0.637646 | 0.812515 | 0.626360 | -0.156977 | -0.092241 | -0.517923 | 0.026563 | -0.597616 | -0.101096 |
| 166 | -0.015438 | -0.656621 | -0.739614 | 0.302131 | 0.583862 | 0.465267 | 0.342075 | -0.318902 | 0.221544 | 0.654368 | 0.777463 | -0.462212 | -0.867288 |
| 167 | -1.567081 | -1.052883 | -0.417918 | 0.636963 | -0.531279 | 0.787238 | -1.913461 | -0.020653 | -0.111129 | 0.112259 | -0.380422 | 0.497894 | 0.709826 |
| 168 | -1.883530 | -0.172892 | -0.340073 | -0.255266 | -0.480237 | -0.061425 | -0.158589 | -0.308725 | -0.034923 | 0.150845 | 0.696367 | 0.704196 | 0.473391 |
| 169 | -1.577057 | -0.602693 | 0.448785 | 1.073850 | -0.714538 | 1.427240 | -1.645225 | 0.812069 | -0.019466 | -0.719024 | -0.991241 | 0.521497 | 0.461555 |
| 170 | 0.667824 | -0.298287 | -0.412356 | -1.154598 | 0.171532 | -0.341146 | -0.411827 | -1.296671 | 0.428160 | -0.233124 | 0.035111 | -0.934501 | 0.123701 |
| 171 | 0.626343 | -0.046406 | -0.168999 | -1.278941 | 0.502027 | -0.060296 | 0.062571 | -1.284727 | 0.028380 | -0.488598 | -0.255008 | -1.199407 | 0.088115 |
| 172 | -0.441233 | -0.987406 | 0.015664 | -2.982951 | -0.379800 | 1.485748 | -2.174788 | -3.521120 | -0.191502 | 2.486906 | 2.771782 | 0.789523 | 0.553119 |
| 173 | 0.331514 | 0.334707 | -0.187508 | 0.489055 | 1.133140 | 1.016598 | -0.572753 | -0.634721 | -0.567790 | -0.492410 | 0.634219 | -0.122575 | -0.205540 |
| 174 | 0.480131 | 0.345645 | 0.202709 | -0.423456 | 1.184414 | 2.116965 | -0.463050 | 0.212550 | 1.973473 | -0.996794 | 0.924229 | -0.170049 | -0.068464 |
| 175 | 1.039410 | -0.773764 | 0.113739 | -0.796036 | -1.053802 | -1.238009 | 0.153897 | 0.497600 | 1.347261 | 0.972165 | 0.993095 | 0.066125 | -0.069772 |
| 176 | 0.194148 | -0.229033 | -0.571129 | -0.704359 | -0.204400 | -0.273049 | 1.105329 | 0.052851 | -0.360196 | 0.099095 | 1.063628 | -0.266594 | 0.041526 |
| 177 | -0.169188 | 0.025195 | -0.189648 | 0.376353 | 0.802036 | -1.185140 | 0.488985 | 0.244963 | 1.305631 | 0.241661 | 0.402415 | -0.494815 | 0.252206 |
| 178 | 1.433007 | 0.217051 | -0.388425 | -1.158798 | 2.068592 | 1.278810 | -1.193547 | -0.909321 | -0.207122 | 2.062093 | 1.374797 | 0.383804 | 1.569650 |
| 179 | 1.371536 | 0.624596 | -0.082552 | 0.444824 | 1.959112 | -0.736647 | -1.448177 | 0.624897 | 1.304939 | -0.025270 | -0.519401 | 0.592135 | 1.312240 |
| 180 | 0.815061 | -1.210119 | 0.844643 | -1.152602 | -0.216878 | -1.573232 | -0.065062 | 2.136014 | -0.285964 | 1.827988 | -0.982121 | 1.139199 | 0.936226 |
| 181 | 0.814962 | -1.028970 | -1.340094 | -1.579784 | 0.774822 | -0.351654 | -2.148181 | 2.772395 | 1.638263 | -0.394371 | 1.796246 | 1.182459 | 0.824064 |
| 182 | 1.615277 | 0.706391 | -0.611277 | 0.513438 | 0.987249 | 1.226124 | 0.240966 | 0.485917 | 1.355615 | -0.480955 | -0.255325 | -0.370864 | 0.107591 |
| 183 | 0.290224 | 0.578762 | 0.024629 | 0.119894 | 0.626180 | 1.025427 | 0.180541 | -0.504388 | -1.085411 | -1.413825 | 0.811722 | 0.640653 | 0.433677 |
| 184 | 0.086408 | -1.394139 | -0.501233 | 1.251905 | -0.481983 | 0.026482 | -1.317983 | -0.580623 | -0.160381 | -0.718194 | 0.110108 | -0.183905 | 0.074891 |
| 185 | 0.024909 | -0.713904 | -1.235134 | -0.194562 | 0.155358 | -0.586587 | -0.455970 | 0.577457 | 1.172268 | 0.468799 | 0.500130 | 1.133624 | 0.192845 |
186 rows × 13 columns
# Elbow method: within-cluster sum of squares (KMeans inertia) for k = 1..14.
# fit() returns the estimator, so the whole sweep is one comprehension.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_
        for k in range(1, 15)]
WSSs
[2418.0, 2172.1027618675616, 1998.025251732407, 1881.5714074571665, 1790.4988909951144, 1710.5509624175916, 1638.1160961205187, 1594.2979605381602, 1541.5219098889218, 1492.348355544395, 1435.6626781919845, 1397.526425851007, 1339.3046499586067, 1333.0833102955885]
# Elbow plot: inertia versus number of clusters; look for the bend to pick k.
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs)
[<matplotlib.lines.Line2D at 0x1e82cc61860>]
# Cluster count chosen from the elbow plot above.
K = 3
# Fix: K was defined but unused — n_clusters was hard-coded to 3. Using K
# keeps the choice in a single place (same behavior while K == 3).
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1,
1, 2, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 1, 1, 1,
1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 2,
2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2,
0, 1, 1, 0, 1, 0, 2, 2, 2, 1, 2, 2, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1,
2, 2, 2, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 1, 0, 2, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 0,
1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2,
1, 0, 0, 0, 2, 2, 0, 0, 2, 2])
# Predict on the same data the model was fit on; for a fitted KMeans this
# matches kmeans_mfcc.labels_, so the call is redundant but harmless.
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1,
1, 2, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 1, 1, 1,
1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 2,
2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2,
0, 1, 1, 0, 1, 0, 2, 2, 2, 1, 2, 2, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1,
2, 2, 2, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 1, 0, 2, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 0,
1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2,
1, 0, 0, 0, 2, 2, 0, 0, 2, 2])
# Append the cluster id and the target flag to X for the grouped bar chart below.
X.loc[:,'Cluster'] = clusters_mfcc
# list(y) drops y's index, so this assignment is positional, not index-aligned
# — assumes X and y share the same row order (true here; verify if reused).
X.loc[:,'chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.311006 | 1.696486 | 0.912001 | -0.211934 | -0.513557 | 1.357699 | 0.777385 | 0.508585 | -2.290902 | -2.422249 | -0.738438 | -2.221933 | -1.191363 | 1 | 0 |
| 1 | 0.947147 | -0.576741 | -1.258913 | -0.786859 | 0.887961 | -1.895175 | -0.310427 | -0.374360 | 1.478062 | 0.972075 | -1.105984 | 0.612318 | -1.486887 | 2 | 0 |
| 2 | -0.115048 | 1.257050 | 0.043002 | -2.677464 | 3.902183 | -1.091787 | 0.505797 | 2.341684 | -2.345224 | -1.678088 | -2.608854 | -2.617777 | -2.135652 | 1 | 0 |
| 3 | 0.621032 | 1.417449 | 1.399722 | -0.625673 | 1.012110 | 0.230671 | -0.287988 | 1.012771 | -2.250326 | -0.340971 | -0.353905 | -0.717440 | -0.390485 | 1 | 0 |
| 4 | 0.340978 | 1.662814 | -1.775422 | 0.156552 | 1.678811 | 0.301711 | 2.038462 | 1.511985 | 1.508787 | -2.046602 | 0.741073 | -0.282747 | -0.779814 | 0 | 0 |
| 5 | 0.426765 | -1.056701 | -1.244088 | -0.696846 | -0.372415 | -0.847420 | -0.209607 | 1.462924 | -0.541420 | 0.000628 | -1.135148 | 1.608546 | 1.709532 | 2 | 0 |
| 6 | 0.210857 | -1.779497 | -2.206121 | -0.832640 | 0.636169 | -1.979858 | -0.510102 | 1.437770 | 0.128209 | 0.025521 | 0.184211 | 2.300204 | 0.912793 | 2 | 0 |
| 7 | -0.821293 | -0.049796 | 0.237440 | 0.379918 | 0.714133 | 0.670070 | 0.122605 | -0.069298 | -0.126759 | -0.199559 | 0.547891 | -0.099623 | -0.024895 | 0 | 0 |
| 8 | 0.420103 | -0.662020 | -0.550543 | -0.566406 | -0.923203 | -0.295152 | -0.533234 | 0.927026 | 0.119135 | 0.218761 | -0.245778 | 0.627242 | 1.313952 | 2 | 0 |
| 9 | -1.436247 | 0.435343 | 2.482690 | 1.099668 | -0.392845 | 0.565039 | 0.569531 | -0.088218 | -0.131137 | -0.699769 | -0.538549 | -0.329443 | 0.942919 | 0 | 0 |
| 10 | -1.574051 | -1.334372 | -1.636184 | 1.768991 | -0.369456 | -0.008046 | -1.402331 | 0.012625 | 1.135935 | 1.623145 | -0.653935 | 0.182348 | 1.052310 | 2 | 0 |
| 11 | -1.798986 | -1.632467 | -1.314854 | 2.656006 | -0.096678 | -0.174852 | -1.748372 | 0.185804 | 0.930317 | 0.365776 | -0.676448 | 0.358271 | 1.523770 | 2 | 0 |
| 12 | -0.708207 | 0.931180 | 0.258840 | -0.189291 | -0.204832 | -0.103872 | 0.221697 | -0.231695 | -0.003439 | 0.423528 | 1.259835 | 0.119625 | -0.192417 | 0 | 0 |
| 13 | -2.007033 | -0.288096 | 0.099713 | 0.390909 | 1.333138 | -0.069950 | 0.643074 | 0.172080 | -0.109666 | 0.304475 | -1.157528 | -1.708326 | -1.420079 | 1 | 0 |
| 14 | -0.497985 | 0.020592 | -0.123619 | 0.165046 | -0.765078 | -0.465219 | 0.172533 | 0.722853 | 0.284863 | -0.035284 | 0.024769 | -0.065990 | -0.992437 | 1 | 0 |
| 15 | 1.200625 | 0.984580 | -0.234312 | 0.348855 | 0.175663 | 0.309396 | 0.390611 | -0.745912 | -0.667554 | -0.052439 | 0.119610 | -0.862930 | 0.945979 | 0 | 0 |
| 16 | 0.435253 | 3.280178 | 0.407736 | 1.143148 | 2.291571 | 0.546530 | 0.170667 | 0.427708 | -0.063936 | -0.532360 | 0.404150 | 0.415849 | 0.869331 | 0 | 0 |
| 17 | -0.398944 | 0.035026 | -1.634042 | -1.354378 | 0.854385 | 1.406182 | -0.773335 | 0.663902 | 0.928496 | 1.278830 | 0.464511 | 0.235475 | -0.040374 | 2 | 0 |
| 18 | -0.454008 | -0.234096 | -0.930672 | -0.507506 | 0.545773 | 0.437756 | 1.026910 | 0.013959 | -0.620099 | -0.593763 | 1.073690 | 0.594340 | 0.987056 | 0 | 0 |
| 19 | 0.149846 | 0.062252 | -0.002122 | 0.786346 | 0.810930 | 0.304880 | -0.882886 | -0.043156 | 2.503584 | 0.894947 | 0.394981 | 0.761651 | 0.402963 | 0 | 0 |
| 20 | -0.314274 | 0.446482 | 0.889744 | 0.891114 | 1.249237 | 0.718469 | 0.296834 | -0.831548 | -0.393364 | -0.103574 | 0.295790 | 0.092061 | 0.424633 | 0 | 0 |
| 21 | 0.659365 | 1.053258 | -0.877939 | -0.295954 | -1.122110 | -0.035202 | 1.512616 | 0.031457 | -0.700740 | -1.687204 | -1.136215 | -1.545451 | -0.082548 | 1 | 0 |
| 22 | 0.568507 | -0.357318 | -1.183577 | -0.069205 | 0.462644 | -0.956011 | 0.501504 | 0.240708 | -0.025482 | 0.416003 | 0.237690 | -0.566935 | -0.846151 | 1 | 0 |
| 23 | 0.696474 | 0.477607 | -1.637469 | -1.158983 | -2.224208 | -1.861929 | -0.176558 | 0.694585 | 0.426826 | -0.088376 | -0.335290 | 1.125320 | 0.705700 | 2 | 0 |
| 24 | -0.221795 | -0.513464 | -0.506448 | 0.594506 | 0.033232 | -1.141879 | -1.582503 | -0.081204 | -0.001962 | -0.704687 | -0.473528 | 0.580117 | 1.533686 | 2 | 0 |
| 25 | 0.036099 | -0.007586 | 0.116729 | 0.438081 | -1.526141 | -1.994283 | -1.014100 | 0.028630 | -0.553238 | -0.540795 | 0.467730 | 0.943285 | 0.498193 | 2 | 0 |
| 26 | -0.291576 | -0.372192 | -1.176599 | 0.078535 | 0.516288 | -1.851892 | -2.218803 | 0.335200 | 0.323222 | 0.006649 | 0.017717 | 0.133172 | 1.208725 | 2 | 0 |
| 27 | 0.953536 | 0.427304 | -0.554063 | 0.425439 | 1.368674 | 0.362392 | 0.477030 | -0.976616 | -0.382390 | 0.310619 | -0.903078 | -0.943886 | -0.047616 | 0 | 0 |
| 28 | -1.172014 | 1.307258 | -1.059323 | -0.655908 | 1.591107 | 0.483432 | 0.474862 | 0.348014 | -0.527448 | 0.798802 | -0.075253 | 1.943808 | 0.108268 | 0 | 0 |
| 29 | -0.954427 | 0.000731 | -0.367958 | 0.281024 | 0.303337 | 0.744504 | 1.271647 | 0.298340 | -0.057042 | -0.297712 | -0.053703 | -0.045043 | -0.561554 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 156 | 0.232363 | -1.167339 | -0.114632 | 1.240724 | -0.209611 | 0.597503 | -0.105216 | -0.393018 | -0.168804 | -0.038601 | 0.602075 | -0.482733 | 0.192333 | 0 | 1 |
| 157 | -1.686193 | -0.806140 | -0.531342 | -0.411912 | 0.312945 | 0.751058 | 0.624837 | -0.394463 | 0.549120 | -1.174079 | -1.374572 | -1.950144 | -0.652535 | 1 | 1 |
| 158 | 0.487798 | 1.116042 | -0.308817 | 0.175231 | -0.191701 | -0.682970 | 0.502123 | 0.749073 | 1.365476 | 0.198244 | 1.283992 | 0.132188 | 0.482532 | 0 | 1 |
| 159 | 1.049575 | 0.742765 | 0.000505 | 0.670386 | 0.235663 | -0.297404 | 0.891743 | 0.047729 | 0.086633 | 0.873400 | 0.552393 | 0.496793 | 0.659122 | 0 | 1 |
| 160 | 0.285967 | 0.602916 | -0.009050 | 0.802464 | 0.333031 | -1.182611 | 0.473870 | 0.896236 | 0.890391 | 0.208214 | 0.786475 | 0.044481 | -0.114927 | 0 | 1 |
| 161 | 2.568510 | -0.180837 | 0.794882 | 1.410838 | 0.898076 | 0.468184 | 0.963255 | 0.338074 | 2.081580 | 2.353196 | 0.146660 | -0.295606 | -0.020484 | 0 | 1 |
| 162 | 0.821849 | 0.906757 | 0.282262 | 0.304716 | -0.691824 | 0.772704 | 2.543328 | -0.404440 | 1.861464 | 1.635426 | 0.204673 | 0.084333 | 0.469447 | 0 | 1 |
| 163 | 2.581037 | 0.239015 | 1.212048 | 0.498566 | 0.095720 | 0.062469 | 3.463238 | 0.374969 | -0.054235 | -0.365031 | -0.169020 | 1.160964 | 0.666076 | 0 | 1 |
| 164 | -0.247271 | -0.874145 | -0.840584 | 0.233138 | 0.034101 | 0.259892 | 0.144353 | -0.570094 | 1.244117 | 0.282845 | 0.127444 | -0.721587 | -1.450860 | 1 | 1 |
| 165 | 0.188979 | -0.519200 | 0.108496 | -0.513645 | -0.637646 | 0.812515 | 0.626360 | -0.156977 | -0.092241 | -0.517923 | 0.026563 | -0.597616 | -0.101096 | 1 | 1 |
| 166 | -0.015438 | -0.656621 | -0.739614 | 0.302131 | 0.583862 | 0.465267 | 0.342075 | -0.318902 | 0.221544 | 0.654368 | 0.777463 | -0.462212 | -0.867288 | 1 | 1 |
| 167 | -1.567081 | -1.052883 | -0.417918 | 0.636963 | -0.531279 | 0.787238 | -1.913461 | -0.020653 | -0.111129 | 0.112259 | -0.380422 | 0.497894 | 0.709826 | 2 | 1 |
| 168 | -1.883530 | -0.172892 | -0.340073 | -0.255266 | -0.480237 | -0.061425 | -0.158589 | -0.308725 | -0.034923 | 0.150845 | 0.696367 | 0.704196 | 0.473391 | 2 | 1 |
| 169 | -1.577057 | -0.602693 | 0.448785 | 1.073850 | -0.714538 | 1.427240 | -1.645225 | 0.812069 | -0.019466 | -0.719024 | -0.991241 | 0.521497 | 0.461555 | 2 | 1 |
| 170 | 0.667824 | -0.298287 | -0.412356 | -1.154598 | 0.171532 | -0.341146 | -0.411827 | -1.296671 | 0.428160 | -0.233124 | 0.035111 | -0.934501 | 0.123701 | 1 | 1 |
| 171 | 0.626343 | -0.046406 | -0.168999 | -1.278941 | 0.502027 | -0.060296 | 0.062571 | -1.284727 | 0.028380 | -0.488598 | -0.255008 | -1.199407 | 0.088115 | 1 | 1 |
| 172 | -0.441233 | -0.987406 | 0.015664 | -2.982951 | -0.379800 | 1.485748 | -2.174788 | -3.521120 | -0.191502 | 2.486906 | 2.771782 | 0.789523 | 0.553119 | 2 | 1 |
| 173 | 0.331514 | 0.334707 | -0.187508 | 0.489055 | 1.133140 | 1.016598 | -0.572753 | -0.634721 | -0.567790 | -0.492410 | 0.634219 | -0.122575 | -0.205540 | 0 | 1 |
| 174 | 0.480131 | 0.345645 | 0.202709 | -0.423456 | 1.184414 | 2.116965 | -0.463050 | 0.212550 | 1.973473 | -0.996794 | 0.924229 | -0.170049 | -0.068464 | 0 | 1 |
| 175 | 1.039410 | -0.773764 | 0.113739 | -0.796036 | -1.053802 | -1.238009 | 0.153897 | 0.497600 | 1.347261 | 0.972165 | 0.993095 | 0.066125 | -0.069772 | 2 | 1 |
| 176 | 0.194148 | -0.229033 | -0.571129 | -0.704359 | -0.204400 | -0.273049 | 1.105329 | 0.052851 | -0.360196 | 0.099095 | 1.063628 | -0.266594 | 0.041526 | 1 | 1 |
| 177 | -0.169188 | 0.025195 | -0.189648 | 0.376353 | 0.802036 | -1.185140 | 0.488985 | 0.244963 | 1.305631 | 0.241661 | 0.402415 | -0.494815 | 0.252206 | 0 | 1 |
| 178 | 1.433007 | 0.217051 | -0.388425 | -1.158798 | 2.068592 | 1.278810 | -1.193547 | -0.909321 | -0.207122 | 2.062093 | 1.374797 | 0.383804 | 1.569650 | 0 | 1 |
| 179 | 1.371536 | 0.624596 | -0.082552 | 0.444824 | 1.959112 | -0.736647 | -1.448177 | 0.624897 | 1.304939 | -0.025270 | -0.519401 | 0.592135 | 1.312240 | 0 | 1 |
| 180 | 0.815061 | -1.210119 | 0.844643 | -1.152602 | -0.216878 | -1.573232 | -0.065062 | 2.136014 | -0.285964 | 1.827988 | -0.982121 | 1.139199 | 0.936226 | 2 | 1 |
| 181 | 0.814962 | -1.028970 | -1.340094 | -1.579784 | 0.774822 | -0.351654 | -2.148181 | 2.772395 | 1.638263 | -0.394371 | 1.796246 | 1.182459 | 0.824064 | 2 | 1 |
| 182 | 1.615277 | 0.706391 | -0.611277 | 0.513438 | 0.987249 | 1.226124 | 0.240966 | 0.485917 | 1.355615 | -0.480955 | -0.255325 | -0.370864 | 0.107591 | 0 | 1 |
| 183 | 0.290224 | 0.578762 | 0.024629 | 0.119894 | 0.626180 | 1.025427 | 0.180541 | -0.504388 | -1.085411 | -1.413825 | 0.811722 | 0.640653 | 0.433677 | 0 | 1 |
| 184 | 0.086408 | -1.394139 | -0.501233 | 1.251905 | -0.481983 | 0.026482 | -1.317983 | -0.580623 | -0.160381 | -0.718194 | 0.110108 | -0.183905 | 0.074891 | 2 | 1 |
| 185 | 0.024909 | -0.713904 | -1.235134 | -0.194562 | 0.155358 | -0.586587 | -0.455970 | 0.577457 | 1.172268 | 0.468799 | 0.500130 | 1.133624 | 0.192845 | 2 | 1 |
186 rows × 15 columns
# Contingency table of cluster id vs. the 'chosen' flag, drawn as stacked bars.
# groupby().size().unstack() produces the same Cluster-by-chosen count matrix
# as the previous reset_index/pivot round-trip.
pivot_df = X.groupby(['Cluster', 'chosen']).size().unstack('chosen')
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82cc917b8>
from IPython.display import display, Markdown, Latex
# Section header: name of the 5th company (index 4) in the per-company loop.
display(Markdown('## '+companies[4]))
# Features: this company's standardized MFCC frame, minus the stale Cluster
# column added by the earlier clustering step.
X = df_n_ps_std_mfcc[4].drop(columns='Cluster')
y = df_n_ps[4]['chosen']
# Default split ratio (75% train / 25% test), unseeded — not reproducible.
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(164, 13)
# Base estimator; its hidden_layer_sizes (and everything else searched below)
# are overridden by GridSearchCV.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate grids for the hyper-parameter search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but excluded from the search below (commented out in `parametros`).
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time() # current time in seconds since Jan 1, 1970 (epoch reference)
np.random.seed(1234)
# Search space; batch_size is deliberately left out to bound the runtime.
parametros = {'activation': activation_vec,
'max_iter':max_iter_vec,
'hidden_layer_sizes': hidden_layer_sizes_vec,
'learning_rate_init': learning_rate_init_vec#,
#'batch_size': batch_size_vec
}
# Track both Kappa and accuracy per fold; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): iid=True is deprecated and removed in scikit-learn >= 0.24 —
# drop the argument when upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # time after the model search finishes
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20, 20), 'learning_rate_init': 0.003, 'max_iter': 400}, que permiten obtener un Accuracy de 72.56% y un Kappa del 45.12
Tiempo total: 20.25 minutos
# Rebuild the grid-searched MLP architecture as a Keras functional model.
n0 = X_train.shape[1]
# Layer widths: the best hidden sizes from the grid search plus one output unit.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
# NOTE(review): activation is fixed to 'tanh' here even though the grid search
# also tuned 'activation' — confirm ignoring the searched value is intentional.
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly-initialized weights; they are restored before fitting.
weights = model.get_weights()
model.summary()
Model: "model_5" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_5 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_14 (Dense) (None, 20) 280 _________________________________________________________________ dense_15 (Dense) (None, 20) 420 _________________________________________________________________ dense_16 (Dense) (None, 20) 420 _________________________________________________________________ dense_17 (Dense) (None, 1) 21 ================================================================= Total params: 1,141 Trainable params: 1,141 Non-trainable params: 0 _________________________________________________________________
# Restore the freshly-initialised weights so training starts from scratch,
# then compile and fit with the learning rate chosen by the grid search.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy fails to improve by at
# least 0.01 over 10 consecutive epochs.
lr_schedule = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
history = model.fit(
    X_train, y_train,
    epochs=epochs,
    validation_data=(X_test, y_test),
    batch_size=32,
    callbacks=[lr_schedule],
)
Train on 164 samples, validate on 55 samples Epoch 1/400 164/164 [==============================] - 0s 1ms/step - loss: 0.7149 - accuracy: 0.5244 - val_loss: 0.7183 - val_accuracy: 0.5273 Epoch 2/400 164/164 [==============================] - 0s 67us/step - loss: 0.6628 - accuracy: 0.5854 - val_loss: 0.7022 - val_accuracy: 0.6182 Epoch 3/400 164/164 [==============================] - 0s 73us/step - loss: 0.6490 - accuracy: 0.6707 - val_loss: 0.6958 - val_accuracy: 0.6364 Epoch 4/400 164/164 [==============================] - 0s 67us/step - loss: 0.6445 - accuracy: 0.6707 - val_loss: 0.6991 - val_accuracy: 0.6182 Epoch 5/400 164/164 [==============================] - 0s 73us/step - loss: 0.6399 - accuracy: 0.6646 - val_loss: 0.6941 - val_accuracy: 0.6182 Epoch 6/400 164/164 [==============================] - 0s 73us/step - loss: 0.6294 - accuracy: 0.6951 - val_loss: 0.6918 - val_accuracy: 0.6182 Epoch 7/400 164/164 [==============================] - 0s 104us/step - loss: 0.6196 - accuracy: 0.6951 - val_loss: 0.6923 - val_accuracy: 0.6182 Epoch 8/400 164/164 [==============================] - 0s 73us/step - loss: 0.6167 - accuracy: 0.7134 - val_loss: 0.6890 - val_accuracy: 0.6000 Epoch 9/400 164/164 [==============================] - 0s 67us/step - loss: 0.6139 - accuracy: 0.6829 - val_loss: 0.6929 - val_accuracy: 0.6000 Epoch 10/400 164/164 [==============================] - 0s 73us/step - loss: 0.6108 - accuracy: 0.6890 - val_loss: 0.6929 - val_accuracy: 0.6182 Epoch 11/400 164/164 [==============================] - 0s 79us/step - loss: 0.6044 - accuracy: 0.7012 - val_loss: 0.6875 - val_accuracy: 0.6000 Epoch 12/400 164/164 [==============================] - 0s 79us/step - loss: 0.5918 - accuracy: 0.7073 - val_loss: 0.6807 - val_accuracy: 0.6545 Epoch 13/400 164/164 [==============================] - 0s 67us/step - loss: 0.5862 - accuracy: 0.7073 - val_loss: 0.6789 - val_accuracy: 0.6545 Epoch 14/400 164/164 [==============================] - 0s 110us/step - loss: 
0.5762 - accuracy: 0.6890 - val_loss: 0.6712 - val_accuracy: 0.6545 Epoch 15/400 164/164 [==============================] - 0s 73us/step - loss: 0.5665 - accuracy: 0.7073 - val_loss: 0.6710 - val_accuracy: 0.6727 Epoch 16/400 164/164 [==============================] - 0s 73us/step - loss: 0.5596 - accuracy: 0.7134 - val_loss: 0.6779 - val_accuracy: 0.6727 Epoch 17/400 164/164 [==============================] - 0s 73us/step - loss: 0.5537 - accuracy: 0.7012 - val_loss: 0.6888 - val_accuracy: 0.6727 Epoch 18/400 164/164 [==============================] - 0s 73us/step - loss: 0.5451 - accuracy: 0.7134 - val_loss: 0.6966 - val_accuracy: 0.6364 Epoch 19/400 164/164 [==============================] - 0s 79us/step - loss: 0.5349 - accuracy: 0.7256 - val_loss: 0.7024 - val_accuracy: 0.5818 Epoch 20/400 164/164 [==============================] - 0s 79us/step - loss: 0.5252 - accuracy: 0.7378 - val_loss: 0.7101 - val_accuracy: 0.6364 Epoch 21/400 164/164 [==============================] - 0s 104us/step - loss: 0.5172 - accuracy: 0.7256 - val_loss: 0.7167 - val_accuracy: 0.6182 Epoch 22/400 164/164 [==============================] - 0s 79us/step - loss: 0.5140 - accuracy: 0.7317 - val_loss: 0.7129 - val_accuracy: 0.6000 Epoch 23/400 164/164 [==============================] - 0s 73us/step - loss: 0.5070 - accuracy: 0.7378 - val_loss: 0.6954 - val_accuracy: 0.6000 Epoch 24/400 164/164 [==============================] - 0s 73us/step - loss: 0.4857 - accuracy: 0.7744 - val_loss: 0.6973 - val_accuracy: 0.5818 Epoch 25/400 164/164 [==============================] - 0s 79us/step - loss: 0.4784 - accuracy: 0.7927 - val_loss: 0.6920 - val_accuracy: 0.6182 Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. 
Epoch 26/400 164/164 [==============================] - 0s 73us/step - loss: 0.4691 - accuracy: 0.8110 - val_loss: 0.6896 - val_accuracy: 0.6364 Epoch 27/400 164/164 [==============================] - 0s 85us/step - loss: 0.4629 - accuracy: 0.8110 - val_loss: 0.6919 - val_accuracy: 0.6182 Epoch 28/400 164/164 [==============================] - 0s 85us/step - loss: 0.4541 - accuracy: 0.8110 - val_loss: 0.6785 - val_accuracy: 0.6364 Epoch 29/400 164/164 [==============================] - 0s 73us/step - loss: 0.4506 - accuracy: 0.8049 - val_loss: 0.6732 - val_accuracy: 0.6364 Epoch 30/400 164/164 [==============================] - 0s 79us/step - loss: 0.4403 - accuracy: 0.8171 - val_loss: 0.6752 - val_accuracy: 0.6182 Epoch 31/400 164/164 [==============================] - 0s 110us/step - loss: 0.4384 - accuracy: 0.8476 - val_loss: 0.6800 - val_accuracy: 0.6182 Epoch 32/400 164/164 [==============================] - 0s 79us/step - loss: 0.4345 - accuracy: 0.8537 - val_loss: 0.6872 - val_accuracy: 0.6182 Epoch 33/400 164/164 [==============================] - 0s 98us/step - loss: 0.4265 - accuracy: 0.8598 - val_loss: 0.6875 - val_accuracy: 0.6545 Epoch 34/400 164/164 [==============================] - 0s 79us/step - loss: 0.4183 - accuracy: 0.8476 - val_loss: 0.6883 - val_accuracy: 0.6727 Epoch 35/400 164/164 [==============================] - 0s 67us/step - loss: 0.4100 - accuracy: 0.8415 - val_loss: 0.6959 - val_accuracy: 0.6727 Epoch 00035: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 36/400 164/164 [==============================] - 0s 67us/step - loss: 0.4034 - accuracy: 0.8293 - val_loss: 0.6946 - val_accuracy: 0.7091 Epoch 37/400 164/164 [==============================] - 0s 67us/step - loss: 0.3991 - accuracy: 0.8293 - val_loss: 0.6968 - val_accuracy: 0.6909 Epoch 38/400 164/164 [==============================] - 0s 85us/step - loss: 0.3958 - accuracy: 0.8293 - val_loss: 0.6987 - val_accuracy: 0.6909 Epoch 39/400 164/164 [==============================] - 0s 97us/step - loss: 0.3918 - accuracy: 0.8354 - val_loss: 0.7003 - val_accuracy: 0.6909 Epoch 40/400 164/164 [==============================] - 0s 79us/step - loss: 0.3892 - accuracy: 0.8354 - val_loss: 0.7088 - val_accuracy: 0.6545 Epoch 41/400 164/164 [==============================] - 0s 73us/step - loss: 0.3852 - accuracy: 0.8354 - val_loss: 0.7138 - val_accuracy: 0.6545 Epoch 42/400 164/164 [==============================] - 0s 79us/step - loss: 0.3819 - accuracy: 0.8476 - val_loss: 0.7122 - val_accuracy: 0.6909 Epoch 43/400 164/164 [==============================] - 0s 79us/step - loss: 0.3787 - accuracy: 0.8598 - val_loss: 0.7089 - val_accuracy: 0.6909 Epoch 44/400 164/164 [==============================] - 0s 73us/step - loss: 0.3763 - accuracy: 0.8598 - val_loss: 0.7081 - val_accuracy: 0.6909 Epoch 45/400 164/164 [==============================] - 0s 73us/step - loss: 0.3726 - accuracy: 0.8598 - val_loss: 0.7091 - val_accuracy: 0.6727 Epoch 46/400 164/164 [==============================] - 0s 73us/step - loss: 0.3678 - accuracy: 0.8598 - val_loss: 0.7040 - val_accuracy: 0.6727 Epoch 00046: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 47/400 164/164 [==============================] - 0s 98us/step - loss: 0.3655 - accuracy: 0.8598 - val_loss: 0.7071 - val_accuracy: 0.6727 Epoch 48/400 164/164 [==============================] - 0s 85us/step - loss: 0.3630 - accuracy: 0.8598 - val_loss: 0.7106 - val_accuracy: 0.6545 Epoch 49/400 164/164 [==============================] - 0s 73us/step - loss: 0.3608 - accuracy: 0.8598 - val_loss: 0.7129 - val_accuracy: 0.6545 Epoch 50/400 164/164 [==============================] - 0s 79us/step - loss: 0.3591 - accuracy: 0.8659 - val_loss: 0.7160 - val_accuracy: 0.6364 Epoch 51/400 164/164 [==============================] - 0s 73us/step - loss: 0.3573 - accuracy: 0.8659 - val_loss: 0.7198 - val_accuracy: 0.6364 Epoch 52/400 164/164 [==============================] - 0s 79us/step - loss: 0.3555 - accuracy: 0.8659 - val_loss: 0.7224 - val_accuracy: 0.6545 Epoch 53/400 164/164 [==============================] - 0s 73us/step - loss: 0.3539 - accuracy: 0.8659 - val_loss: 0.7248 - val_accuracy: 0.6364 Epoch 54/400 164/164 [==============================] - 0s 85us/step - loss: 0.3524 - accuracy: 0.8659 - val_loss: 0.7281 - val_accuracy: 0.6364 Epoch 55/400 164/164 [==============================] - 0s 98us/step - loss: 0.3514 - accuracy: 0.8902 - val_loss: 0.7324 - val_accuracy: 0.6364 Epoch 56/400 164/164 [==============================] - 0s 79us/step - loss: 0.3504 - accuracy: 0.8841 - val_loss: 0.7356 - val_accuracy: 0.6364 Epoch 00056: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 57/400 164/164 [==============================] - 0s 73us/step - loss: 0.3483 - accuracy: 0.8902 - val_loss: 0.7344 - val_accuracy: 0.6364 Epoch 58/400 164/164 [==============================] - 0s 79us/step - loss: 0.3472 - accuracy: 0.8902 - val_loss: 0.7329 - val_accuracy: 0.6364 Epoch 59/400 164/164 [==============================] - 0s 79us/step - loss: 0.3463 - accuracy: 0.8963 - val_loss: 0.7335 - val_accuracy: 0.6364 Epoch 60/400 164/164 [==============================] - 0s 73us/step - loss: 0.3449 - accuracy: 0.8963 - val_loss: 0.7348 - val_accuracy: 0.6364 Epoch 61/400 164/164 [==============================] - 0s 73us/step - loss: 0.3439 - accuracy: 0.8963 - val_loss: 0.7356 - val_accuracy: 0.6364 Epoch 62/400 164/164 [==============================] - 0s 73us/step - loss: 0.3427 - accuracy: 0.8902 - val_loss: 0.7366 - val_accuracy: 0.6364 Epoch 63/400 164/164 [==============================] - 0s 98us/step - loss: 0.3420 - accuracy: 0.8902 - val_loss: 0.7351 - val_accuracy: 0.6364 Epoch 64/400 164/164 [==============================] - 0s 79us/step - loss: 0.3409 - accuracy: 0.8902 - val_loss: 0.7358 - val_accuracy: 0.6364 Epoch 65/400 164/164 [==============================] - 0s 73us/step - loss: 0.3398 - accuracy: 0.8780 - val_loss: 0.7364 - val_accuracy: 0.6364 Epoch 66/400 164/164 [==============================] - 0s 73us/step - loss: 0.3389 - accuracy: 0.8780 - val_loss: 0.7359 - val_accuracy: 0.6545 Epoch 00066: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 67/400 164/164 [==============================] - 0s 128us/step - loss: 0.3380 - accuracy: 0.8841 - val_loss: 0.7359 - val_accuracy: 0.6545 Epoch 68/400 164/164 [==============================] - 0s 79us/step - loss: 0.3373 - accuracy: 0.8841 - val_loss: 0.7361 - val_accuracy: 0.6364 Epoch 69/400 164/164 [==============================] - 0s 79us/step - loss: 0.3372 - accuracy: 0.8902 - val_loss: 0.7360 - val_accuracy: 0.6364 Epoch 70/400 164/164 [==============================] - 0s 73us/step - loss: 0.3364 - accuracy: 0.8902 - val_loss: 0.7358 - val_accuracy: 0.6364 Epoch 71/400 164/164 [==============================] - 0s 79us/step - loss: 0.3359 - accuracy: 0.8902 - val_loss: 0.7358 - val_accuracy: 0.6364 Epoch 72/400 164/164 [==============================] - 0s 91us/step - loss: 0.3356 - accuracy: 0.8841 - val_loss: 0.7359 - val_accuracy: 0.6545 Epoch 73/400 164/164 [==============================] - 0s 85us/step - loss: 0.3349 - accuracy: 0.8841 - val_loss: 0.7369 - val_accuracy: 0.6545 Epoch 74/400 164/164 [==============================] - 0s 67us/step - loss: 0.3343 - accuracy: 0.8841 - val_loss: 0.7377 - val_accuracy: 0.6545 Epoch 75/400 164/164 [==============================] - 0s 85us/step - loss: 0.3341 - accuracy: 0.8780 - val_loss: 0.7388 - val_accuracy: 0.6364 Epoch 76/400 164/164 [==============================] - 0s 85us/step - loss: 0.3336 - accuracy: 0.8780 - val_loss: 0.7396 - val_accuracy: 0.6364 Epoch 00076: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 77/400 164/164 [==============================] - 0s 73us/step - loss: 0.3332 - accuracy: 0.8841 - val_loss: 0.7399 - val_accuracy: 0.6364 Epoch 78/400 164/164 [==============================] - 0s 73us/step - loss: 0.3331 - accuracy: 0.8841 - val_loss: 0.7402 - val_accuracy: 0.6364 Epoch 79/400 164/164 [==============================] - 0s 73us/step - loss: 0.3328 - accuracy: 0.8841 - val_loss: 0.7402 - val_accuracy: 0.6364 Epoch 80/400 164/164 [==============================] - 0s 73us/step - loss: 0.3326 - accuracy: 0.8841 - val_loss: 0.7405 - val_accuracy: 0.6364 Epoch 81/400 164/164 [==============================] - 0s 73us/step - loss: 0.3324 - accuracy: 0.8841 - val_loss: 0.7409 - val_accuracy: 0.6364 Epoch 82/400 164/164 [==============================] - 0s 79us/step - loss: 0.3322 - accuracy: 0.8841 - val_loss: 0.7409 - val_accuracy: 0.6364 Epoch 83/400 164/164 [==============================] - 0s 79us/step - loss: 0.3319 - accuracy: 0.8902 - val_loss: 0.7408 - val_accuracy: 0.6364 Epoch 84/400 164/164 [==============================] - 0s 91us/step - loss: 0.3317 - accuracy: 0.8902 - val_loss: 0.7409 - val_accuracy: 0.6364 Epoch 85/400 164/164 [==============================] - 0s 79us/step - loss: 0.3315 - accuracy: 0.8902 - val_loss: 0.7411 - val_accuracy: 0.6364 Epoch 86/400 164/164 [==============================] - 0s 73us/step - loss: 0.3312 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 00086: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. 
Epoch 87/400 164/164 [==============================] - 0s 79us/step - loss: 0.3310 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 88/400 164/164 [==============================] - 0s 104us/step - loss: 0.3309 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 89/400 164/164 [==============================] - 0s 104us/step - loss: 0.3307 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 90/400 164/164 [==============================] - 0s 91us/step - loss: 0.3306 - accuracy: 0.8902 - val_loss: 0.7413 - val_accuracy: 0.6364 Epoch 91/400 164/164 [==============================] - 0s 98us/step - loss: 0.3305 - accuracy: 0.8902 - val_loss: 0.7415 - val_accuracy: 0.6364 Epoch 92/400 164/164 [==============================] - 0s 73us/step - loss: 0.3305 - accuracy: 0.8963 - val_loss: 0.7418 - val_accuracy: 0.6364 Epoch 93/400 164/164 [==============================] - 0s 79us/step - loss: 0.3304 - accuracy: 0.8963 - val_loss: 0.7419 - val_accuracy: 0.6364 Epoch 94/400 164/164 [==============================] - 0s 79us/step - loss: 0.3302 - accuracy: 0.8963 - val_loss: 0.7420 - val_accuracy: 0.6364 Epoch 95/400 164/164 [==============================] - 0s 91us/step - loss: 0.3301 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6364 Epoch 96/400 164/164 [==============================] - 0s 79us/step - loss: 0.3300 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 00096: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05. 
Epoch 97/400 164/164 [==============================] - 0s 79us/step - loss: 0.3299 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 98/400 164/164 [==============================] - 0s 79us/step - loss: 0.3298 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 99/400 164/164 [==============================] - 0s 91us/step - loss: 0.3298 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 100/400 164/164 [==============================] - 0s 79us/step - loss: 0.3297 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 101/400 164/164 [==============================] - 0s 79us/step - loss: 0.3297 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 102/400 164/164 [==============================] - 0s 79us/step - loss: 0.3296 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 103/400 164/164 [==============================] - 0s 73us/step - loss: 0.3296 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 104/400 164/164 [==============================] - 0s 97us/step - loss: 0.3295 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 105/400 164/164 [==============================] - 0s 79us/step - loss: 0.3295 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 106/400 164/164 [==============================] - 0s 67us/step - loss: 0.3294 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 00106: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06. 
Epoch 107/400 164/164 [==============================] - 0s 91us/step - loss: 0.3294 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 108/400 164/164 [==============================] - 0s 85us/step - loss: 0.3293 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 109/400 164/164 [==============================] - 0s 79us/step - loss: 0.3293 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 110/400 164/164 [==============================] - 0s 79us/step - loss: 0.3293 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 111/400 164/164 [==============================] - 0s 91us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 112/400 164/164 [==============================] - 0s 79us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 113/400 164/164 [==============================] - 0s 79us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 114/400 164/164 [==============================] - 0s 85us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 115/400 164/164 [==============================] - 0s 85us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 116/400 164/164 [==============================] - 0s 85us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 00116: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06. 
Epoch 117/400 164/164 [==============================] - 0s 73us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 118/400 164/164 [==============================] - 0s 85us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 119/400 164/164 [==============================] - 0s 79us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 120/400 164/164 [==============================] - 0s 73us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 121/400 164/164 [==============================] - 0s 79us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 122/400 164/164 [==============================] - 0s 98us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 123/400 164/164 [==============================] - 0s 79us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 124/400 164/164 [==============================] - 0s 73us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 125/400 164/164 [==============================] - 0s 73us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 126/400 164/164 [==============================] - 0s 104us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 00126: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06. 
Epoch 127/400 164/164 [==============================] - 0s 79us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 128/400 164/164 [==============================] - 0s 79us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 129/400 164/164 [==============================] - 0s 79us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 130/400 164/164 [==============================] - 0s 134us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 131/400 164/164 [==============================] - 0s 98us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 132/400 164/164 [==============================] - 0s 85us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 133/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 134/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 135/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 136/400 164/164 [==============================] - 0s 104us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00136: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07. 
Epoch 137/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 138/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 139/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 140/400 164/164 [==============================] - 0s 116us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 141/400 164/164 [==============================] - 0s 91us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 142/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 143/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 144/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 145/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 146/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00146: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07. 
Epoch 147/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 148/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 149/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 150/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 151/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 152/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 153/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 154/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 155/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 156/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00156: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07. 
Epoch 157/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 158/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 159/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 160/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 161/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 162/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 163/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 164/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 165/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 166/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00166: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08. 
Epoch 167/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 168/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 169/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 170/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 171/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 172/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 173/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 174/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 175/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 176/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00176: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08. 
Epoch 177/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 178/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 179/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 180/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 181/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 182/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 183/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 184/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 185/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 186/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00186: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08. 
Epoch 187/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 188/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 189/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 190/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 191/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 192/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 193/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 194/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 195/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 196/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00196: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08. 
Epoch 197/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 198/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 199/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 200/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 201/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 202/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 203/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 204/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 205/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 206/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00206: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09. 
Epoch 207/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 208/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 209/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 210/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 211/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 212/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 213/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 214/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 215/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 216/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00216: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09. 
Epoch 217/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 218/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 219/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 220/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 221/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 222/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 223/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 224/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 225/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 226/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00226: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09. 
Epoch 227/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 228/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 229/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 230/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 231/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 232/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 233/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 234/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 235/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 236/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00236: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10. 
Epoch 237/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 238/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 239/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 240/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 241/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 242/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 243/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 244/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 245/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 246/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00246: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10. 
Epoch 247/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 248/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 249/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 250/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 251/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 252/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 253/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 254/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 255/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 256/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00256: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10. 
Epoch 257/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 258/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 259/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 260/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 261/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 262/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 263/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 264/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 265/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 266/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00266: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11. 
Epoch 267/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 268/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 269/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 270/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 271/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 272/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 273/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 274/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 275/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 276/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00276: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11. 
Epoch 277/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 278/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 279/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 280/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 281/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 282/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 283/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 284/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 285/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 286/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00286: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11. 
Epoch 287/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 288/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 289/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 290/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 291/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 292/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 293/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 294/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 295/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 296/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00296: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11. 
Epoch 297/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 298/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 299/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 300/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 301/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 302/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 303/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 304/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 305/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 306/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00306: ReduceLROnPlateau reducing learning rate to 5.5879354962651284e-12. 
Epoch 307/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 308/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 309/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 310/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 311/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 312/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 313/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 314/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 315/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 316/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00316: ReduceLROnPlateau reducing learning rate to 2.7939677481325642e-12. 
Epoch 317/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 318/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 319/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 320/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 321/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 322/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 323/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 324/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 325/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 326/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00326: ReduceLROnPlateau reducing learning rate to 1.3969838740662821e-12. 
Epoch 327/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 328/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 329/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 330/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 331/400 164/164 [==============================] - ETA: 0s - loss: 0.3229 - accuracy: 0.90 - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 332/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 333/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 334/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 335/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 336/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00336: ReduceLROnPlateau reducing learning rate to 6.984919370331411e-13. 
Epoch 337/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 338/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 339/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 340/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 341/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 342/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 343/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 344/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 345/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 346/400 164/164 [==============================] - 0s 97us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00346: ReduceLROnPlateau reducing learning rate to 3.4924596851657053e-13. 
Epoch 347/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 348/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 349/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 350/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 351/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 352/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 353/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 354/400 164/164 [==============================] - 0s 158us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 355/400 164/164 [==============================] - 0s 140us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 356/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00356: ReduceLROnPlateau reducing learning rate to 1.7462298425828526e-13. 
Epoch 357/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 358/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 359/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 360/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 361/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 362/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 363/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 364/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 365/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 366/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00366: ReduceLROnPlateau reducing learning rate to 8.731149212914263e-14. 
Epoch 367/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 368/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 369/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 370/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 371/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 372/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 373/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 374/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 375/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 376/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00376: ReduceLROnPlateau reducing learning rate to 4.3655746064571316e-14. 
Epoch 377/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 378/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 379/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 380/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 381/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 382/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 383/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 384/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 385/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 386/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00386: ReduceLROnPlateau reducing learning rate to 2.1827873032285658e-14. 
Epoch 387/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 388/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 389/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 390/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 391/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 392/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 393/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 394/400 164/164 [==============================] - 0s 140us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 395/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 396/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00396: ReduceLROnPlateau reducing learning rate to 1.0913936516142829e-14. 
Epoch 397/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 398/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 399/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 400/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545
# Plot the learning curves recorded by Keras during model.fit().
# (Removed a leftover debug `print(epochs)` that only printed `range(0, 400)`.)
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))  # one x-coordinate per training epoch (0-based)

# Accuracy: blue dots = training, solid blue line = validation.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.show()

# Loss: same plotting convention as above.
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
range(0, 400)
# Score the trained network on the held-out test split.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
55/55 [==============================] - 0s 55us/step test loss: 0.7425676215778697, test accuracy: 0.6545454263687134
# Predicted positive-class scores for the test set, ranked by ROC AUC.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.6399456521739131
# Binarize the predicted scores at the 0.5 decision threshold, then
# report Cohen's kappa (chance-corrected agreement with the true labels).
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.2943956785955435
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.992062 | -0.477172 | -1.079451 | -2.369470 | -1.705431 | -0.098594 | -0.281836 | -1.432001 | -0.898623 | 0.130446 | -0.024683 | -0.312128 | 0.020392 |
| 1 | 0.843575 | -0.507672 | -0.731713 | -0.334904 | 1.442336 | -0.491141 | -0.266416 | -0.511246 | 1.004414 | 0.558777 | 0.127114 | -1.667555 | 0.835458 |
| 2 | 0.816922 | -0.263544 | 0.639646 | -0.865417 | 1.276602 | -0.245238 | 0.106722 | -0.761365 | -0.170481 | -1.443667 | -0.451102 | 1.196430 | -0.037846 |
| 3 | 4.368525 | 0.851784 | -0.671158 | -0.128467 | 2.141169 | -0.472725 | -1.437233 | -1.858760 | 1.581800 | -0.145852 | 0.107228 | 1.458238 | 1.666081 |
| 4 | 0.001312 | 0.535305 | -0.648296 | 0.221414 | 0.549478 | 0.736878 | -0.439538 | -0.138787 | 0.584258 | 0.095671 | 1.901833 | 2.909252 | 1.802578 |
| 5 | -0.236754 | 0.488978 | 0.203743 | 0.088401 | -0.151814 | 0.811707 | -0.092973 | 0.153518 | -0.936863 | 0.354100 | 0.123352 | 1.318569 | 1.097711 |
| 6 | -0.842496 | 0.742173 | 0.068601 | 1.394492 | -0.276167 | 1.301853 | 0.336343 | 1.077540 | -1.118983 | 1.688235 | -0.103661 | 1.224883 | 0.350956 |
| 7 | -0.952702 | 1.078642 | -0.563379 | -0.018149 | -0.073042 | -0.591301 | -1.392389 | 0.209234 | 0.725065 | 0.064350 | 0.034449 | 0.581953 | 2.151966 |
| 8 | 0.046457 | -0.093025 | -0.804385 | 0.542662 | -0.130939 | 0.042792 | 1.198959 | -0.559116 | 0.017192 | -0.249308 | 0.747851 | -0.035599 | 0.995166 |
| 9 | -0.781158 | 0.099463 | 0.196737 | 2.462131 | 0.316140 | -0.369698 | 2.196715 | -0.800443 | 2.137687 | 1.438443 | 0.055279 | -0.284437 | 1.702942 |
| 10 | -0.906167 | 0.568017 | 0.700382 | 2.876646 | -0.809125 | -0.491839 | 1.801564 | -2.406947 | 1.939246 | 1.397556 | 0.709408 | -0.423394 | 1.773713 |
| 11 | 1.172687 | 1.292213 | -0.402038 | 0.087342 | 0.324539 | 0.973336 | -0.548282 | 0.781195 | 0.846038 | 0.464514 | -1.030463 | -0.559243 | 0.168727 |
| 12 | 0.367875 | 1.949889 | 0.516382 | 0.657124 | -0.534306 | 0.575187 | -0.750861 | 0.247200 | -0.232297 | 0.332174 | -0.426787 | 0.318763 | 0.083316 |
| 13 | 1.270520 | 1.194102 | 0.267933 | 0.676186 | 0.394734 | -0.709975 | -0.047626 | 1.113385 | 0.339962 | 0.424937 | -0.528480 | 0.671225 | 0.078062 |
| 14 | -0.095931 | 0.792392 | 0.626113 | 0.189989 | 0.315198 | -0.175744 | 0.011713 | -0.072196 | 0.742338 | 0.974567 | 0.935685 | 0.083454 | 0.970157 |
| 15 | -0.322645 | 0.977766 | 0.685697 | 0.670670 | 0.997903 | 0.619018 | 0.498110 | -0.016728 | 0.445370 | -0.102204 | 0.199517 | -0.315303 | 0.347920 |
| 16 | 0.565974 | 0.440551 | 0.402995 | 1.815814 | 1.906139 | 1.105013 | 1.256180 | 0.907086 | 0.592851 | -0.159427 | 1.013051 | -0.620202 | 1.259932 |
| 17 | -0.863540 | 0.887127 | 1.387720 | -0.082168 | -0.694633 | -0.810037 | 1.251697 | -0.443532 | 0.307506 | 0.253798 | -0.292483 | 0.030812 | 0.176350 |
| 18 | -0.822258 | -0.630193 | -0.672294 | -0.279417 | -0.731983 | -1.510167 | -1.393705 | -0.161872 | 0.722297 | 0.910604 | -0.610303 | 0.380547 | 1.296315 |
| 19 | -0.889164 | 0.641922 | 2.278761 | 0.190213 | -0.341231 | -0.624107 | 1.228820 | -0.549441 | -0.662942 | 0.481866 | -0.541347 | -1.061735 | -0.122227 |
| 20 | 0.795964 | 0.484784 | 0.898919 | 0.027625 | 0.415359 | 0.271286 | 0.366966 | -0.498975 | 0.300352 | 0.216702 | 0.361195 | -0.771976 | 0.085971 |
| 21 | 0.168183 | -0.077353 | 1.019887 | -0.637065 | 0.731534 | 0.877245 | 1.225125 | -0.566997 | -0.452222 | -1.105384 | 0.185636 | -0.782808 | -0.224975 |
| 22 | 0.510023 | -0.099060 | 0.064384 | -0.039933 | 0.786951 | 0.119530 | -0.259052 | -0.881354 | -0.113425 | 1.191274 | 0.335443 | -0.189618 | -0.337688 |
| 23 | 0.216210 | -0.069447 | 0.974822 | -0.626273 | 0.835854 | 0.914236 | 1.226463 | -0.369525 | -0.398299 | -1.146613 | 0.026274 | -0.944475 | -0.192948 |
| 24 | -0.239273 | -0.518568 | -0.127834 | 0.045011 | 0.403223 | 0.368253 | -0.584902 | -0.905436 | -0.405699 | 0.129383 | 0.809611 | -0.174138 | -0.115393 |
| 25 | -1.241907 | 1.355534 | -0.693470 | 0.793789 | 0.606007 | 0.930263 | 0.009323 | -0.712463 | 0.037916 | -0.182143 | 1.212760 | -0.083882 | 0.639662 |
| 26 | -0.847436 | 1.180146 | -0.489592 | 1.189572 | -0.457645 | -0.163979 | -0.010812 | -0.765561 | -0.347488 | -0.216575 | 0.804302 | -0.236378 | 0.481212 |
| 27 | -0.378383 | 1.017722 | -1.812001 | 0.443514 | 0.583209 | 1.709730 | 0.715521 | -0.076610 | 0.416120 | 0.013436 | 0.420025 | -0.925263 | 0.626400 |
| 28 | 0.245370 | 1.187084 | 1.056929 | 2.013063 | -0.505622 | 1.228583 | -1.158143 | 0.622932 | 0.113512 | 0.948397 | 0.008252 | 1.035839 | -0.691702 |
| 29 | -0.623386 | 1.368898 | 1.216933 | 1.961377 | 0.744541 | 1.555516 | -1.205283 | -0.252995 | -0.325624 | 0.538668 | 0.197646 | 0.356450 | -0.219812 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 189 | -0.565077 | 0.809784 | 0.557457 | 0.815038 | 0.823053 | -0.931359 | -0.039244 | -0.199068 | 0.083690 | -0.235063 | -0.030800 | -0.564557 | -0.253507 |
| 190 | -0.602848 | 0.638838 | 0.763481 | -0.424641 | -0.810302 | -0.951734 | -0.732024 | -0.504038 | 0.379372 | 0.748895 | -0.593820 | -0.772491 | 0.175752 |
| 191 | -1.094031 | -0.896961 | 0.400325 | -1.635971 | -1.099938 | -1.091799 | -0.593281 | 0.890889 | 0.984647 | 0.584509 | 0.318496 | 0.175062 | -0.783524 |
| 192 | -0.348357 | 0.944340 | 0.239675 | 0.003612 | -1.370450 | -0.996597 | -0.616405 | 0.161481 | -0.258760 | 0.534721 | -0.431338 | 0.376456 | -1.623026 |
| 193 | 2.110671 | -1.005236 | 0.268022 | 0.459390 | -1.985350 | 0.405677 | -0.361571 | -1.272053 | -0.873345 | 2.111218 | -0.246708 | 0.798456 | 1.067252 |
| 194 | 1.222194 | -1.600122 | -1.149302 | 0.230839 | -0.213026 | -1.572114 | 0.486447 | -0.770701 | 0.244895 | 2.689114 | -2.296486 | 0.718338 | -1.220356 |
| 195 | -0.509789 | -0.757711 | 0.189267 | 0.516644 | 0.750906 | -1.485714 | 2.485824 | -1.204754 | -3.373113 | -0.450016 | -1.091178 | -0.474728 | -0.522197 |
| 196 | 0.194175 | -0.618441 | -1.090420 | 0.233017 | -1.492602 | -0.342192 | -1.612833 | 0.714990 | 0.072755 | -0.026932 | 0.464029 | 0.212333 | 1.204262 |
| 197 | 0.297635 | -0.727616 | -1.927078 | -0.145347 | -0.990256 | 0.052935 | -1.791108 | -0.351333 | -0.064903 | 0.201842 | 1.581215 | 1.084453 | -0.168841 |
| 198 | -0.271030 | -0.575137 | -1.005334 | -0.238705 | -0.931830 | -1.319114 | -0.668613 | 0.510822 | 0.209623 | 0.487577 | 0.154874 | 0.133768 | 1.259548 |
| 199 | 0.059096 | -0.370313 | -0.760047 | 0.706270 | -2.488266 | -1.336692 | -0.683584 | 0.436366 | -0.150281 | -0.711308 | -0.851205 | 0.253942 | -0.052516 |
| 200 | 0.147539 | -0.233608 | -0.578016 | 0.870637 | -2.418094 | -1.286070 | -0.692623 | 0.342693 | 0.015890 | -0.795418 | -1.221248 | 0.309493 | -0.526480 |
| 201 | -0.076214 | -1.055629 | 0.159389 | -0.403318 | -0.111273 | -1.325990 | -0.867502 | 0.519381 | 0.192007 | -0.024629 | 0.220420 | 0.551046 | 0.399728 |
| 202 | 1.468986 | 0.518464 | 1.475456 | -1.400891 | 0.408186 | -1.831201 | 1.474742 | 0.566660 | -0.403197 | -1.295176 | -0.443787 | -1.884346 | -1.993491 |
| 203 | -1.739107 | 0.192104 | -0.670709 | -1.236237 | -1.672915 | -0.680127 | 0.027148 | 0.524909 | 1.865754 | -0.634310 | -0.607429 | -1.471191 | -0.632982 |
| 204 | -0.663868 | -0.862566 | -0.329803 | -0.857680 | 0.167824 | -0.013328 | 0.176565 | 0.125832 | 0.609671 | -1.296827 | -0.435986 | -1.341223 | -0.977207 |
| 205 | -0.739818 | -0.668220 | -0.077479 | 0.026286 | 0.027801 | 0.040659 | -0.161646 | -1.046948 | -1.248976 | -0.449243 | 1.046834 | 1.381194 | 1.646325 |
| 206 | 0.475752 | 0.695473 | -0.072097 | 1.081397 | -0.366985 | -2.008080 | 0.515734 | 0.005330 | 1.193800 | -0.841825 | -2.650200 | -3.862624 | -2.115507 |
| 207 | -1.331365 | -1.632552 | -0.876636 | 0.076190 | 1.187799 | 1.138590 | 1.235955 | 1.583447 | 0.890342 | -1.587964 | 0.546109 | 1.565567 | 1.756993 |
| 208 | -0.397476 | 0.090963 | 1.217996 | 0.773741 | 1.107204 | -1.125870 | -0.915396 | -1.130561 | -1.914456 | -0.664474 | -0.226576 | 0.112420 | 0.235011 |
| 209 | -0.465823 | -1.372705 | -0.445436 | 0.316510 | -1.492946 | -1.103783 | 0.353513 | -0.311377 | -1.095388 | -0.615078 | -0.585868 | 0.172807 | -0.860564 |
| 210 | -0.594535 | -1.761364 | -1.069906 | -0.502969 | -1.411276 | -0.906350 | -0.559102 | -1.240920 | -2.254196 | -1.206339 | -0.528047 | 0.924112 | 0.472298 |
| 211 | -1.022693 | 0.373374 | -0.104205 | -0.815628 | -0.574733 | 0.906934 | 0.765114 | -0.015386 | 0.110695 | 1.832325 | 0.712557 | -0.951976 | -0.678869 |
| 212 | -0.967902 | 0.155275 | 0.013938 | -0.549105 | -0.907792 | 0.881907 | 0.609589 | -0.135010 | -0.373473 | 1.152134 | 0.386511 | -0.744687 | -0.447017 |
| 213 | -1.238242 | -0.062983 | -0.133082 | -0.158458 | -0.338086 | -0.411874 | 0.964537 | 0.870379 | 0.530337 | 0.858339 | 0.489332 | -1.190977 | -1.340484 |
| 214 | 0.349761 | -1.391267 | -3.069473 | 0.840195 | 1.044391 | -1.052018 | 1.004856 | 1.478511 | 1.210060 | -1.145325 | 2.653757 | 1.937234 | 0.592139 |
| 215 | 0.782819 | -1.300386 | -0.487318 | 0.850960 | -2.046427 | 1.050631 | 0.289069 | 2.400271 | 2.707288 | -0.278238 | 0.152360 | 1.912210 | -0.208225 |
| 216 | 1.847553 | -1.059174 | -0.808403 | 0.400706 | -0.275009 | 0.409744 | -0.141885 | 0.706348 | 0.476002 | 0.990111 | -0.168504 | 0.856440 | -0.395652 |
| 217 | 2.608478 | 0.174234 | 2.534211 | -0.985597 | -0.436400 | 3.751943 | 1.560179 | -2.367095 | 1.272529 | 2.464209 | -0.954336 | 0.310720 | -1.209456 |
| 218 | -0.069569 | 0.418008 | -0.004324 | 1.330358 | 0.365352 | -0.582788 | -0.527444 | -0.298114 | -0.353021 | -1.118883 | -0.459230 | -0.986241 | -0.041010 |
219 rows × 13 columns
# Elbow method: fit K-Means for k = 1..14 and collect the within-cluster
# sum of squares (inertia) for each k. KMeans.fit returns the estimator,
# so the whole sweep is a single comprehension.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
[2847.0, 2572.5760570812117, 2370.209947155015, 2235.6129406180157, 2112.951551625758, 2041.1809211260454, 1982.3615393500422, 1899.0667595696164, 1851.9267246215204, 1760.4468946465518, 1745.79714786859, 1689.1350809615656, 1657.4940102564742, 1625.370413913055]
# Plot the elbow curve: inertia (WSS) against the number of clusters k.
candidate_ks = range(1, 15)
plt.figure(figsize=(12, 12))
plt.plot(candidate_ks, WSSs)
[<matplotlib.lines.Line2D at 0x1e82d2ffda0>]
# Number of clusters chosen from the elbow plot above.
K = 3
# Pass the K constant instead of repeating the literal 3, so changing K in
# one place updates the model consistently.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 2, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 2,
1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0,
0, 0, 1, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 0, 0, 1, 1,
1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 2,
0, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2,
2, 0, 1, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
0, 0, 1, 0, 2, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2,
1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 2, 0, 2,
2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1,
1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 0, 1, 0, 1])
# For K-Means, predict() on the same data the model was fit on returns
# exactly the labels already stored in labels_ (as the identical array
# reprs above confirm), so reuse the attribute instead of recomputing.
clusters_mfcc = kmeans_mfcc.labels_
clusters_mfcc
array([1, 2, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 2,
1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0,
0, 0, 1, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 0, 0, 1, 1,
1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 2,
0, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2,
2, 0, 1, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
0, 0, 1, 0, 2, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2,
1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 2, 0, 2,
2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1,
1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 0, 1, 0, 1])
# Attach the cluster assignment and the original target to the feature
# frame so they can be cross-tabulated below. Direct column assignment is
# equivalent to .loc[:, col] when adding new columns.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.992062 | -0.477172 | -1.079451 | -2.369470 | -1.705431 | -0.098594 | -0.281836 | -1.432001 | -0.898623 | 0.130446 | -0.024683 | -0.312128 | 0.020392 | 1 | 0 |
| 1 | 0.843575 | -0.507672 | -0.731713 | -0.334904 | 1.442336 | -0.491141 | -0.266416 | -0.511246 | 1.004414 | 0.558777 | 0.127114 | -1.667555 | 0.835458 | 2 | 0 |
| 2 | 0.816922 | -0.263544 | 0.639646 | -0.865417 | 1.276602 | -0.245238 | 0.106722 | -0.761365 | -0.170481 | -1.443667 | -0.451102 | 1.196430 | -0.037846 | 1 | 0 |
| 3 | 4.368525 | 0.851784 | -0.671158 | -0.128467 | 2.141169 | -0.472725 | -1.437233 | -1.858760 | 1.581800 | -0.145852 | 0.107228 | 1.458238 | 1.666081 | 1 | 0 |
| 4 | 0.001312 | 0.535305 | -0.648296 | 0.221414 | 0.549478 | 0.736878 | -0.439538 | -0.138787 | 0.584258 | 0.095671 | 1.901833 | 2.909252 | 1.802578 | 0 | 0 |
| 5 | -0.236754 | 0.488978 | 0.203743 | 0.088401 | -0.151814 | 0.811707 | -0.092973 | 0.153518 | -0.936863 | 0.354100 | 0.123352 | 1.318569 | 1.097711 | 0 | 0 |
| 6 | -0.842496 | 0.742173 | 0.068601 | 1.394492 | -0.276167 | 1.301853 | 0.336343 | 1.077540 | -1.118983 | 1.688235 | -0.103661 | 1.224883 | 0.350956 | 0 | 0 |
| 7 | -0.952702 | 1.078642 | -0.563379 | -0.018149 | -0.073042 | -0.591301 | -1.392389 | 0.209234 | 0.725065 | 0.064350 | 0.034449 | 0.581953 | 2.151966 | 1 | 0 |
| 8 | 0.046457 | -0.093025 | -0.804385 | 0.542662 | -0.130939 | 0.042792 | 1.198959 | -0.559116 | 0.017192 | -0.249308 | 0.747851 | -0.035599 | 0.995166 | 0 | 0 |
| 9 | -0.781158 | 0.099463 | 0.196737 | 2.462131 | 0.316140 | -0.369698 | 2.196715 | -0.800443 | 2.137687 | 1.438443 | 0.055279 | -0.284437 | 1.702942 | 0 | 0 |
| 10 | -0.906167 | 0.568017 | 0.700382 | 2.876646 | -0.809125 | -0.491839 | 1.801564 | -2.406947 | 1.939246 | 1.397556 | 0.709408 | -0.423394 | 1.773713 | 0 | 0 |
| 11 | 1.172687 | 1.292213 | -0.402038 | 0.087342 | 0.324539 | 0.973336 | -0.548282 | 0.781195 | 0.846038 | 0.464514 | -1.030463 | -0.559243 | 0.168727 | 0 | 0 |
| 12 | 0.367875 | 1.949889 | 0.516382 | 0.657124 | -0.534306 | 0.575187 | -0.750861 | 0.247200 | -0.232297 | 0.332174 | -0.426787 | 0.318763 | 0.083316 | 0 | 0 |
| 13 | 1.270520 | 1.194102 | 0.267933 | 0.676186 | 0.394734 | -0.709975 | -0.047626 | 1.113385 | 0.339962 | 0.424937 | -0.528480 | 0.671225 | 0.078062 | 0 | 0 |
| 14 | -0.095931 | 0.792392 | 0.626113 | 0.189989 | 0.315198 | -0.175744 | 0.011713 | -0.072196 | 0.742338 | 0.974567 | 0.935685 | 0.083454 | 0.970157 | 0 | 0 |
| 15 | -0.322645 | 0.977766 | 0.685697 | 0.670670 | 0.997903 | 0.619018 | 0.498110 | -0.016728 | 0.445370 | -0.102204 | 0.199517 | -0.315303 | 0.347920 | 0 | 0 |
| 16 | 0.565974 | 0.440551 | 0.402995 | 1.815814 | 1.906139 | 1.105013 | 1.256180 | 0.907086 | 0.592851 | -0.159427 | 1.013051 | -0.620202 | 1.259932 | 0 | 0 |
| 17 | -0.863540 | 0.887127 | 1.387720 | -0.082168 | -0.694633 | -0.810037 | 1.251697 | -0.443532 | 0.307506 | 0.253798 | -0.292483 | 0.030812 | 0.176350 | 0 | 0 |
| 18 | -0.822258 | -0.630193 | -0.672294 | -0.279417 | -0.731983 | -1.510167 | -1.393705 | -0.161872 | 0.722297 | 0.910604 | -0.610303 | 0.380547 | 1.296315 | 1 | 0 |
| 19 | -0.889164 | 0.641922 | 2.278761 | 0.190213 | -0.341231 | -0.624107 | 1.228820 | -0.549441 | -0.662942 | 0.481866 | -0.541347 | -1.061735 | -0.122227 | 2 | 0 |
| 20 | 0.795964 | 0.484784 | 0.898919 | 0.027625 | 0.415359 | 0.271286 | 0.366966 | -0.498975 | 0.300352 | 0.216702 | 0.361195 | -0.771976 | 0.085971 | 0 | 0 |
| 21 | 0.168183 | -0.077353 | 1.019887 | -0.637065 | 0.731534 | 0.877245 | 1.225125 | -0.566997 | -0.452222 | -1.105384 | 0.185636 | -0.782808 | -0.224975 | 2 | 0 |
| 22 | 0.510023 | -0.099060 | 0.064384 | -0.039933 | 0.786951 | 0.119530 | -0.259052 | -0.881354 | -0.113425 | 1.191274 | 0.335443 | -0.189618 | -0.337688 | 1 | 0 |
| 23 | 0.216210 | -0.069447 | 0.974822 | -0.626273 | 0.835854 | 0.914236 | 1.226463 | -0.369525 | -0.398299 | -1.146613 | 0.026274 | -0.944475 | -0.192948 | 2 | 0 |
| 24 | -0.239273 | -0.518568 | -0.127834 | 0.045011 | 0.403223 | 0.368253 | -0.584902 | -0.905436 | -0.405699 | 0.129383 | 0.809611 | -0.174138 | -0.115393 | 1 | 0 |
| 25 | -1.241907 | 1.355534 | -0.693470 | 0.793789 | 0.606007 | 0.930263 | 0.009323 | -0.712463 | 0.037916 | -0.182143 | 1.212760 | -0.083882 | 0.639662 | 0 | 0 |
| 26 | -0.847436 | 1.180146 | -0.489592 | 1.189572 | -0.457645 | -0.163979 | -0.010812 | -0.765561 | -0.347488 | -0.216575 | 0.804302 | -0.236378 | 0.481212 | 0 | 0 |
| 27 | -0.378383 | 1.017722 | -1.812001 | 0.443514 | 0.583209 | 1.709730 | 0.715521 | -0.076610 | 0.416120 | 0.013436 | 0.420025 | -0.925263 | 0.626400 | 0 | 0 |
| 28 | 0.245370 | 1.187084 | 1.056929 | 2.013063 | -0.505622 | 1.228583 | -1.158143 | 0.622932 | 0.113512 | 0.948397 | 0.008252 | 1.035839 | -0.691702 | 0 | 0 |
| 29 | -0.623386 | 1.368898 | 1.216933 | 1.961377 | 0.744541 | 1.555516 | -1.205283 | -0.252995 | -0.325624 | 0.538668 | 0.197646 | 0.356450 | -0.219812 | 0 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 189 | -0.565077 | 0.809784 | 0.557457 | 0.815038 | 0.823053 | -0.931359 | -0.039244 | -0.199068 | 0.083690 | -0.235063 | -0.030800 | -0.564557 | -0.253507 | 0 | 1 |
| 190 | -0.602848 | 0.638838 | 0.763481 | -0.424641 | -0.810302 | -0.951734 | -0.732024 | -0.504038 | 0.379372 | 0.748895 | -0.593820 | -0.772491 | 0.175752 | 1 | 1 |
| 191 | -1.094031 | -0.896961 | 0.400325 | -1.635971 | -1.099938 | -1.091799 | -0.593281 | 0.890889 | 0.984647 | 0.584509 | 0.318496 | 0.175062 | -0.783524 | 2 | 1 |
| 192 | -0.348357 | 0.944340 | 0.239675 | 0.003612 | -1.370450 | -0.996597 | -0.616405 | 0.161481 | -0.258760 | 0.534721 | -0.431338 | 0.376456 | -1.623026 | 1 | 1 |
| 193 | 2.110671 | -1.005236 | 0.268022 | 0.459390 | -1.985350 | 0.405677 | -0.361571 | -1.272053 | -0.873345 | 2.111218 | -0.246708 | 0.798456 | 1.067252 | 1 | 1 |
| 194 | 1.222194 | -1.600122 | -1.149302 | 0.230839 | -0.213026 | -1.572114 | 0.486447 | -0.770701 | 0.244895 | 2.689114 | -2.296486 | 0.718338 | -1.220356 | 1 | 1 |
| 195 | -0.509789 | -0.757711 | 0.189267 | 0.516644 | 0.750906 | -1.485714 | 2.485824 | -1.204754 | -3.373113 | -0.450016 | -1.091178 | -0.474728 | -0.522197 | 2 | 1 |
| 196 | 0.194175 | -0.618441 | -1.090420 | 0.233017 | -1.492602 | -0.342192 | -1.612833 | 0.714990 | 0.072755 | -0.026932 | 0.464029 | 0.212333 | 1.204262 | 1 | 1 |
| 197 | 0.297635 | -0.727616 | -1.927078 | -0.145347 | -0.990256 | 0.052935 | -1.791108 | -0.351333 | -0.064903 | 0.201842 | 1.581215 | 1.084453 | -0.168841 | 1 | 1 |
| 198 | -0.271030 | -0.575137 | -1.005334 | -0.238705 | -0.931830 | -1.319114 | -0.668613 | 0.510822 | 0.209623 | 0.487577 | 0.154874 | 0.133768 | 1.259548 | 1 | 1 |
| 199 | 0.059096 | -0.370313 | -0.760047 | 0.706270 | -2.488266 | -1.336692 | -0.683584 | 0.436366 | -0.150281 | -0.711308 | -0.851205 | 0.253942 | -0.052516 | 1 | 1 |
| 200 | 0.147539 | -0.233608 | -0.578016 | 0.870637 | -2.418094 | -1.286070 | -0.692623 | 0.342693 | 0.015890 | -0.795418 | -1.221248 | 0.309493 | -0.526480 | 1 | 1 |
| 201 | -0.076214 | -1.055629 | 0.159389 | -0.403318 | -0.111273 | -1.325990 | -0.867502 | 0.519381 | 0.192007 | -0.024629 | 0.220420 | 0.551046 | 0.399728 | 1 | 1 |
| 202 | 1.468986 | 0.518464 | 1.475456 | -1.400891 | 0.408186 | -1.831201 | 1.474742 | 0.566660 | -0.403197 | -1.295176 | -0.443787 | -1.884346 | -1.993491 | 2 | 1 |
| 203 | -1.739107 | 0.192104 | -0.670709 | -1.236237 | -1.672915 | -0.680127 | 0.027148 | 0.524909 | 1.865754 | -0.634310 | -0.607429 | -1.471191 | -0.632982 | 2 | 1 |
| 204 | -0.663868 | -0.862566 | -0.329803 | -0.857680 | 0.167824 | -0.013328 | 0.176565 | 0.125832 | 0.609671 | -1.296827 | -0.435986 | -1.341223 | -0.977207 | 2 | 1 |
| 205 | -0.739818 | -0.668220 | -0.077479 | 0.026286 | 0.027801 | 0.040659 | -0.161646 | -1.046948 | -1.248976 | -0.449243 | 1.046834 | 1.381194 | 1.646325 | 1 | 1 |
| 206 | 0.475752 | 0.695473 | -0.072097 | 1.081397 | -0.366985 | -2.008080 | 0.515734 | 0.005330 | 1.193800 | -0.841825 | -2.650200 | -3.862624 | -2.115507 | 2 | 1 |
| 207 | -1.331365 | -1.632552 | -0.876636 | 0.076190 | 1.187799 | 1.138590 | 1.235955 | 1.583447 | 0.890342 | -1.587964 | 0.546109 | 1.565567 | 1.756993 | 0 | 1 |
| 208 | -0.397476 | 0.090963 | 1.217996 | 0.773741 | 1.107204 | -1.125870 | -0.915396 | -1.130561 | -1.914456 | -0.664474 | -0.226576 | 0.112420 | 0.235011 | 1 | 1 |
| 209 | -0.465823 | -1.372705 | -0.445436 | 0.316510 | -1.492946 | -1.103783 | 0.353513 | -0.311377 | -1.095388 | -0.615078 | -0.585868 | 0.172807 | -0.860564 | 1 | 1 |
| 210 | -0.594535 | -1.761364 | -1.069906 | -0.502969 | -1.411276 | -0.906350 | -0.559102 | -1.240920 | -2.254196 | -1.206339 | -0.528047 | 0.924112 | 0.472298 | 1 | 1 |
| 211 | -1.022693 | 0.373374 | -0.104205 | -0.815628 | -0.574733 | 0.906934 | 0.765114 | -0.015386 | 0.110695 | 1.832325 | 0.712557 | -0.951976 | -0.678869 | 2 | 1 |
| 212 | -0.967902 | 0.155275 | 0.013938 | -0.549105 | -0.907792 | 0.881907 | 0.609589 | -0.135010 | -0.373473 | 1.152134 | 0.386511 | -0.744687 | -0.447017 | 2 | 1 |
| 213 | -1.238242 | -0.062983 | -0.133082 | -0.158458 | -0.338086 | -0.411874 | 0.964537 | 0.870379 | 0.530337 | 0.858339 | 0.489332 | -1.190977 | -1.340484 | 2 | 1 |
| 214 | 0.349761 | -1.391267 | -3.069473 | 0.840195 | 1.044391 | -1.052018 | 1.004856 | 1.478511 | 1.210060 | -1.145325 | 2.653757 | 1.937234 | 0.592139 | 0 | 1 |
| 215 | 0.782819 | -1.300386 | -0.487318 | 0.850960 | -2.046427 | 1.050631 | 0.289069 | 2.400271 | 2.707288 | -0.278238 | 0.152360 | 1.912210 | -0.208225 | 0 | 1 |
| 216 | 1.847553 | -1.059174 | -0.808403 | 0.400706 | -0.275009 | 0.409744 | -0.141885 | 0.706348 | 0.476002 | 0.990111 | -0.168504 | 0.856440 | -0.395652 | 1 | 1 |
| 217 | 2.608478 | 0.174234 | 2.534211 | -0.985597 | -0.436400 | 3.751943 | 1.560179 | -2.367095 | 1.272529 | 2.464209 | -0.954336 | 0.310720 | -1.209456 | 0 | 1 |
| 218 | -0.069569 | 0.418008 | -0.004324 | 1.330358 | 0.365352 | -0.582788 | -0.527444 | -0.298114 | -0.353021 | -1.118883 | -0.459230 | -0.986241 | -0.041010 | 1 | 1 |
219 rows × 15 columns
# Count tracks per (chosen, Cluster) pair, pivot to one column per value of
# 'chosen', and draw a stacked bar chart of cluster composition.
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df[[0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82d379160>
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[5]))
# Features: standardized MFCCs for company index 5, without the previously
# added cluster column; target: whether the track was chosen.
X = df_n_ps_std_mfcc[5].drop(columns='Cluster')
y = df_n_ps[5]['chosen']
# Fix the split seed so the train/test partition — and every result derived
# from it below — is reproducible, consistent with random_state=0 used for
# the clustering models earlier in the notebook.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
X_train.shape
(168, 13)
# Base MLP; its hyper-parameters are overridden by the grid search below.
mlp = MLPClassifier(hidden_layer_sizes=(30, 30, 30))

# Candidate values for the hyper-parameter grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [
    0.001, 0.002, 0.003, 0.004, 0.005, 0.006,
    0.007, 0.008, 0.009, 0.01, 0.02,
]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time() # Current time in seconds since Jan 1, 1970 (the epoch reference point)
# NOTE(review): this seed alone does not make MLP training deterministic unless
# MLPClassifier's own random_state is also set — confirm whether that was intended.
np.random.seed(1234)
# Hyper-parameter grid; batch_size is commented out, presumably to keep the
# search runtime manageable (it would multiply the grid by 7) — confirm.
parametros = {'activation': activation_vec,
'max_iter':max_iter_vec,
'hidden_layer_sizes': hidden_layer_sizes_vec,
'learning_rate_init': learning_rate_init_vec#,
#'batch_size': batch_size_vec
}
# Score with Cohen's kappa in addition to accuracy; refit the best model by accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the `iid` parameter is deprecated and removed in scikit-learn 0.24;
# drop it when upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
# Report the best parameters, their CV accuracy, and the corresponding kappa.
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time right after model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'logistic', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.009, 'max_iter': 1000}, que permiten obtener un Accuracy de 73.81% y un Kappa del 36.33
Tiempo total: 24.57 minutos
n0 = X_train.shape[1]  # number of input features
### hidden_layer_sizes
# Layer widths for the Keras replica of the best sklearn MLP: the tuned
# hidden layer sizes followed by a single output unit. list(...) + [1]
# replaces the original manual index loop (same result, idiomatic form).
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
# Build a Keras functional model mirroring the best sklearn MLP.
# Fix: the hidden layers previously hard-coded 'tanh', but the grid search
# selected activation='logistic' (see the printed best_params_ above); use
# the tuned activation, mapping sklearn's name to the Keras equivalent
# ('logistic' in sklearn is 'sigmoid' in Keras).
best_activation = grid.best_params_['activation']
hidden_activation = {'logistic': 'sigmoid'}.get(best_activation, best_activation)
input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
# Chain one Dense layer per tuned hidden width; ns[-1] == 1 is the output unit.
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation=hidden_activation)(hidden_outputs[i]))
# Sigmoid output for binary classification ('chosen' is 0/1).
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly initialized weights so training can be restarted
# from the same starting point.
weights = model.get_weights()
model.summary()
Model: "model_6" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_6 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_18 (Dense) (None, 20) 280 _________________________________________________________________ dense_19 (Dense) (None, 1) 21 ================================================================= Total params: 301 Trainable params: 301 Non-trainable params: 0 _________________________________________________________________
model.set_weights(weights)  # reset to the initial random weights captured above
# NOTE(review): the `lr` keyword is deprecated in newer Keras in favour of
# `learning_rate` — update on upgrade.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train with the tuned epoch count; halve the learning rate whenever
# validation accuracy fails to improve by 0.01 over 10 epochs.
# NOTE(review): the test set is used both as validation data and to drive the
# LR scheduler, so metrics on it are not an unbiased estimate — consider a
# separate validation split.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
callbacks=[
keras.callbacks.ReduceLROnPlateau(
monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
]
)
Train on 168 samples, validate on 57 samples Epoch 1/1000 168/168 [==============================] - 0s 767us/step - loss: 0.7409 - accuracy: 0.4524 - val_loss: 0.7005 - val_accuracy: 0.5263 Epoch 2/1000 168/168 [==============================] - 0s 77us/step - loss: 0.6410 - accuracy: 0.6429 - val_loss: 0.6336 - val_accuracy: 0.6316 Epoch 3/1000 168/168 [==============================] - 0s 83us/step - loss: 0.5912 - accuracy: 0.7024 - val_loss: 0.6007 - val_accuracy: 0.6491 Epoch 4/1000 168/168 [==============================] - 0s 89us/step - loss: 0.5613 - accuracy: 0.6905 - val_loss: 0.5951 - val_accuracy: 0.6491 Epoch 5/1000 168/168 [==============================] - 0s 83us/step - loss: 0.5452 - accuracy: 0.6964 - val_loss: 0.5939 - val_accuracy: 0.6667 Epoch 6/1000 168/168 [==============================] - 0s 59us/step - loss: 0.5329 - accuracy: 0.7202 - val_loss: 0.5949 - val_accuracy: 0.6491 Epoch 7/1000 168/168 [==============================] - 0s 65us/step - loss: 0.5235 - accuracy: 0.7202 - val_loss: 0.6057 - val_accuracy: 0.6667 Epoch 8/1000 168/168 [==============================] - 0s 59us/step - loss: 0.5187 - accuracy: 0.7440 - val_loss: 0.6144 - val_accuracy: 0.6316 Epoch 9/1000 168/168 [==============================] - 0s 65us/step - loss: 0.5105 - accuracy: 0.7381 - val_loss: 0.6141 - val_accuracy: 0.6491 Epoch 10/1000 168/168 [==============================] - 0s 65us/step - loss: 0.5042 - accuracy: 0.7560 - val_loss: 0.6174 - val_accuracy: 0.6491 Epoch 11/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4950 - accuracy: 0.7560 - val_loss: 0.6176 - val_accuracy: 0.6491 Epoch 12/1000 168/168 [==============================] - 0s 59us/step - loss: 0.4889 - accuracy: 0.7500 - val_loss: 0.6170 - val_accuracy: 0.6491 Epoch 13/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4805 - accuracy: 0.7679 - val_loss: 0.6190 - val_accuracy: 0.6491 Epoch 14/1000 168/168 [==============================] - 0s 
59us/step - loss: 0.4721 - accuracy: 0.7738 - val_loss: 0.6194 - val_accuracy: 0.6491 Epoch 15/1000 168/168 [==============================] - 0s 59us/step - loss: 0.4649 - accuracy: 0.7798 - val_loss: 0.6190 - val_accuracy: 0.6491 Epoch 00015: ReduceLROnPlateau reducing learning rate to 0.0044999998062849045. Epoch 16/1000 168/168 [==============================] - 0s 59us/step - loss: 0.4584 - accuracy: 0.7798 - val_loss: 0.6213 - val_accuracy: 0.6491 Epoch 17/1000 168/168 [==============================] - 0s 59us/step - loss: 0.4553 - accuracy: 0.7798 - val_loss: 0.6213 - val_accuracy: 0.6491 Epoch 18/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4508 - accuracy: 0.7798 - val_loss: 0.6222 - val_accuracy: 0.6491 Epoch 19/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4467 - accuracy: 0.7738 - val_loss: 0.6233 - val_accuracy: 0.6491 Epoch 20/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4423 - accuracy: 0.7857 - val_loss: 0.6245 - val_accuracy: 0.6491 Epoch 21/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4381 - accuracy: 0.7917 - val_loss: 0.6232 - val_accuracy: 0.6491 Epoch 22/1000 168/168 [==============================] - 0s 101us/step - loss: 0.4335 - accuracy: 0.7917 - val_loss: 0.6160 - val_accuracy: 0.6491 Epoch 23/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4298 - accuracy: 0.7976 - val_loss: 0.6142 - val_accuracy: 0.6667 Epoch 24/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4257 - accuracy: 0.8095 - val_loss: 0.6135 - val_accuracy: 0.6667 Epoch 25/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4219 - accuracy: 0.8155 - val_loss: 0.6148 - val_accuracy: 0.6491 Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.0022499999031424522. 
Epoch 26/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4179 - accuracy: 0.8155 - val_loss: 0.6160 - val_accuracy: 0.6667 Epoch 27/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4160 - accuracy: 0.8214 - val_loss: 0.6163 - val_accuracy: 0.6667 Epoch 28/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4142 - accuracy: 0.8274 - val_loss: 0.6172 - val_accuracy: 0.6667 Epoch 29/1000 168/168 [==============================] - 0s 65us/step - loss: 0.4122 - accuracy: 0.8274 - val_loss: 0.6182 - val_accuracy: 0.6667 Epoch 30/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4098 - accuracy: 0.8274 - val_loss: 0.6202 - val_accuracy: 0.6667 Epoch 31/1000 168/168 [==============================] - 0s 59us/step - loss: 0.4077 - accuracy: 0.8274 - val_loss: 0.6221 - val_accuracy: 0.6667 Epoch 32/1000 168/168 [==============================] - 0s 77us/step - loss: 0.4055 - accuracy: 0.8274 - val_loss: 0.6252 - val_accuracy: 0.6491 Epoch 33/1000 168/168 [==============================] - 0s 71us/step - loss: 0.4037 - accuracy: 0.8155 - val_loss: 0.6261 - val_accuracy: 0.6491 Epoch 34/1000 168/168 [==============================] - 0s 59us/step - loss: 0.4013 - accuracy: 0.8214 - val_loss: 0.6267 - val_accuracy: 0.6491 Epoch 35/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3991 - accuracy: 0.8214 - val_loss: 0.6266 - val_accuracy: 0.6491 Epoch 00035: ReduceLROnPlateau reducing learning rate to 0.0011249999515712261. 
Epoch 36/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3970 - accuracy: 0.8214 - val_loss: 0.6259 - val_accuracy: 0.6491 Epoch 37/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3957 - accuracy: 0.8214 - val_loss: 0.6258 - val_accuracy: 0.6491 Epoch 38/1000 168/168 [==============================] - 0s 54us/step - loss: 0.3946 - accuracy: 0.8214 - val_loss: 0.6257 - val_accuracy: 0.6491 Epoch 39/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3940 - accuracy: 0.8214 - val_loss: 0.6264 - val_accuracy: 0.6491 Epoch 40/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3922 - accuracy: 0.8214 - val_loss: 0.6264 - val_accuracy: 0.6667 Epoch 41/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3911 - accuracy: 0.8214 - val_loss: 0.6260 - val_accuracy: 0.6667 Epoch 42/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3900 - accuracy: 0.8214 - val_loss: 0.6263 - val_accuracy: 0.6667 Epoch 43/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3891 - accuracy: 0.8214 - val_loss: 0.6270 - val_accuracy: 0.6667 Epoch 44/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3881 - accuracy: 0.8214 - val_loss: 0.6279 - val_accuracy: 0.6491 Epoch 45/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3866 - accuracy: 0.8214 - val_loss: 0.6281 - val_accuracy: 0.6491 Epoch 00045: ReduceLROnPlateau reducing learning rate to 0.0005624999757856131. 
Epoch 46/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3857 - accuracy: 0.8274 - val_loss: 0.6285 - val_accuracy: 0.6667 Epoch 47/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3851 - accuracy: 0.8274 - val_loss: 0.6289 - val_accuracy: 0.6491 Epoch 48/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3843 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6491 Epoch 49/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3837 - accuracy: 0.8214 - val_loss: 0.6292 - val_accuracy: 0.6491 Epoch 50/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3833 - accuracy: 0.8214 - val_loss: 0.6292 - val_accuracy: 0.6491 Epoch 51/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3826 - accuracy: 0.8214 - val_loss: 0.6293 - val_accuracy: 0.6491 Epoch 52/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3821 - accuracy: 0.8214 - val_loss: 0.6292 - val_accuracy: 0.6491 Epoch 53/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3815 - accuracy: 0.8214 - val_loss: 0.6294 - val_accuracy: 0.6491 Epoch 54/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3808 - accuracy: 0.8214 - val_loss: 0.6294 - val_accuracy: 0.6667 Epoch 55/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3803 - accuracy: 0.8214 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00055: ReduceLROnPlateau reducing learning rate to 0.00028124998789280653. 
Epoch 56/1000 168/168 [==============================] - 0s 71us/step - loss: 0.3797 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6491 Epoch 57/1000 168/168 [==============================] - 0s 54us/step - loss: 0.3794 - accuracy: 0.8274 - val_loss: 0.6293 - val_accuracy: 0.6491 Epoch 58/1000 168/168 [==============================] - 0s 71us/step - loss: 0.3792 - accuracy: 0.8274 - val_loss: 0.6294 - val_accuracy: 0.6491 Epoch 59/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3789 - accuracy: 0.8274 - val_loss: 0.6296 - val_accuracy: 0.6667 Epoch 60/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3786 - accuracy: 0.8274 - val_loss: 0.6296 - val_accuracy: 0.6667 Epoch 61/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3783 - accuracy: 0.8274 - val_loss: 0.6294 - val_accuracy: 0.6667 Epoch 62/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3780 - accuracy: 0.8274 - val_loss: 0.6293 - val_accuracy: 0.6667 Epoch 63/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3777 - accuracy: 0.8274 - val_loss: 0.6294 - val_accuracy: 0.6491 Epoch 64/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3774 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 65/1000 168/168 [==============================] - 0s 59us/step - loss: 0.3771 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 00065: ReduceLROnPlateau reducing learning rate to 0.00014062499394640326. 
Epoch 66/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3769 - accuracy: 0.8274 - val_loss: 0.6296 - val_accuracy: 0.6491 Epoch 67/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3767 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 68/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3766 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 69/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3764 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 70/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3763 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 71/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3761 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 72/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3760 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 73/1000 168/168 [==============================] - 0s 71us/step - loss: 0.3759 - accuracy: 0.8274 - val_loss: 0.6295 - val_accuracy: 0.6491 Epoch 74/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3757 - accuracy: 0.8274 - val_loss: 0.6294 - val_accuracy: 0.6491 Epoch 75/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3756 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00075: ReduceLROnPlateau reducing learning rate to 7.031249697320163e-05. 
Epoch 76/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3754 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 77/1000 168/168 [==============================] - 0s 65us/step - loss: 0.3754 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 78/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3753 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 79/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3752 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 80/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3751 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 81/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3751 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 82/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3750 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 83/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3749 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 84/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3748 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 85/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3748 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 00085: ReduceLROnPlateau reducing learning rate to 3.5156248486600816e-05. 
Epoch 86/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3747 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 87/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3747 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 88/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3746 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 89/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3746 - accuracy: 0.8274 - val_loss: 0.6290 - val_accuracy: 0.6667 Epoch 90/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3746 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 91/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3745 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 92/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3745 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 93/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3744 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 94/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3744 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 95/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3744 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 00095: ReduceLROnPlateau reducing learning rate to 1.7578124243300408e-05. 
Epoch 96/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3743 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 97/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3743 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 98/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3743 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 99/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3743 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 100/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3743 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 101/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3743 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 102/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3742 - accuracy: 0.8274 - val_loss: 0.6291 - val_accuracy: 0.6667 Epoch 103/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3742 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 104/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3742 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 105/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3742 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00105: ReduceLROnPlateau reducing learning rate to 8.789062121650204e-06. 
Epoch 106/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3742 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 107/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3742 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 108/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 109/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 110/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 111/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 112/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 113/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 114/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 115/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00115: ReduceLROnPlateau reducing learning rate to 4.394531060825102e-06. 
Epoch 116/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 117/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 118/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 119/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 120/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3741 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 121/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 122/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 123/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 124/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 125/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00125: ReduceLROnPlateau reducing learning rate to 2.197265530412551e-06. 
Epoch 126/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 127/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 128/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 129/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 130/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 131/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 132/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 133/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 134/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 135/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00135: ReduceLROnPlateau reducing learning rate to 1.0986327652062755e-06. 
Epoch 136/1000 168/168 [==============================] - 0s 137us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 137/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 138/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 139/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 140/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 141/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 142/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 143/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 144/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 145/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00145: ReduceLROnPlateau reducing learning rate to 5.493163826031378e-07. 
Epoch 146/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 147/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 148/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 149/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 150/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 151/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 152/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 153/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 154/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 155/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00155: ReduceLROnPlateau reducing learning rate to 2.746581913015689e-07. 
Epoch 156/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 157/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 158/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 159/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 160/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 161/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 162/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 163/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 164/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 165/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00165: ReduceLROnPlateau reducing learning rate to 1.3732909565078444e-07. 
Epoch 166/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 167/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 168/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 169/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 170/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 171/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 172/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 173/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 174/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 175/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00175: ReduceLROnPlateau reducing learning rate to 6.866454782539222e-08. 
Epoch 176/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 177/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 178/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 179/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 180/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 181/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 182/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 183/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 184/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 185/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00185: ReduceLROnPlateau reducing learning rate to 3.433227391269611e-08. 
Epoch 186/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 187/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 188/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 189/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 190/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 191/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 192/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 193/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 194/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 195/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00195: ReduceLROnPlateau reducing learning rate to 1.7166136956348055e-08. 
Epoch 196/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 197/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 198/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 199/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 200/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 201/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 202/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 203/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 204/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 205/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00205: ReduceLROnPlateau reducing learning rate to 8.583068478174027e-09. 
Epoch 206/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 207/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 208/1000 168/168 [==============================] - ETA: 0s - loss: 0.4425 - accuracy: 0.71 - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 209/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 210/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 211/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 212/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 213/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 214/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 215/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00215: ReduceLROnPlateau reducing learning rate to 4.291534239087014e-09. 
Epoch 216/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 217/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 218/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 219/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 220/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 221/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 222/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 223/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 224/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 225/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00225: ReduceLROnPlateau reducing learning rate to 2.145767119543507e-09. 
Epoch 226/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 227/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 228/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 229/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 230/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 231/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 232/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 233/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 234/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 235/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00235: ReduceLROnPlateau reducing learning rate to 1.0728835597717534e-09. 
Epoch 236/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 237/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 238/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 239/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 240/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 241/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 242/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 243/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 244/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 245/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00245: ReduceLROnPlateau reducing learning rate to 5.364417798858767e-10. 
Epoch 246/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 247/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 248/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 249/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 250/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 251/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 252/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 253/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 254/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 255/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00255: ReduceLROnPlateau reducing learning rate to 2.6822088994293836e-10. 
Epoch 256/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 257/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 258/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 259/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 260/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 261/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 262/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 263/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 264/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 265/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00265: ReduceLROnPlateau reducing learning rate to 1.3411044497146918e-10. 
Epoch 266/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 267/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 268/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 269/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 270/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 271/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 272/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 273/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 274/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 275/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00275: ReduceLROnPlateau reducing learning rate to 6.705522248573459e-11. 
Epoch 276/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 277/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 278/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 279/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 280/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 281/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 282/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 283/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 284/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 285/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00285: ReduceLROnPlateau reducing learning rate to 3.3527611242867295e-11. 
Epoch 286/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 287/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 288/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 289/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 290/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 291/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 292/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 293/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 294/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 295/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00295: ReduceLROnPlateau reducing learning rate to 1.6763805621433647e-11. 
Epoch 296/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 297/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 298/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 299/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 300/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 301/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 302/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 303/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 304/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 305/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00305: ReduceLROnPlateau reducing learning rate to 8.381902810716824e-12. 
Epoch 306/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 307/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 308/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 309/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 310/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 311/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 312/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 313/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 314/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 315/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00315: ReduceLROnPlateau reducing learning rate to 4.190951405358412e-12. 
Epoch 316/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 317/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 318/1000 168/168 [==============================] - 0s 149us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 319/1000 168/168 [==============================] - ETA: 0s - loss: 0.3350 - accuracy: 0.87 - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 320/1000 168/168 [==============================] - 0s 143us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 321/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 322/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 323/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 324/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 325/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00325: ReduceLROnPlateau reducing learning rate to 2.095475702679206e-12. 
Epoch 326/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 327/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 328/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 329/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 330/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 331/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 332/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 333/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 334/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 335/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00335: ReduceLROnPlateau reducing learning rate to 1.047737851339603e-12. 
Epoch 336/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 337/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 338/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 339/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 340/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 341/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 342/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 343/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 344/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 345/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00345: ReduceLROnPlateau reducing learning rate to 5.238689256698015e-13. 
Epoch 346/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 347/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 348/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 349/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 350/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 351/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 352/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 353/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 354/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 355/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00355: ReduceLROnPlateau reducing learning rate to 2.6193446283490074e-13. 
Epoch 356/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 357/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 358/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 359/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 360/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 361/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 362/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 363/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 364/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 365/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00365: ReduceLROnPlateau reducing learning rate to 1.3096723141745037e-13. 
Epoch 366/1000 168/168 [==============================] - 0s 196us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 367/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 368/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 369/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 370/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 371/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 372/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 373/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 374/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 375/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00375: ReduceLROnPlateau reducing learning rate to 6.548361570872518e-14. 
Epoch 376/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 377/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 378/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 379/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 380/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 381/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 382/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 383/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 384/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 385/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00385: ReduceLROnPlateau reducing learning rate to 3.274180785436259e-14. 
Epoch 386/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 387/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 388/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 389/1000 168/168 [==============================] - 0s 137us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 390/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 391/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 392/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 393/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 394/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 395/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00395: ReduceLROnPlateau reducing learning rate to 1.6370903927181296e-14. 
Epoch 396/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 397/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 398/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 399/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 400/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 401/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 402/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 403/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 404/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 405/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00405: ReduceLROnPlateau reducing learning rate to 8.185451963590648e-15. 
Epoch 406/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 407/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 408/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 409/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 410/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 411/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 412/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 413/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 414/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 415/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00415: ReduceLROnPlateau reducing learning rate to 4.092725981795324e-15. 
Epoch 416/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 417/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 418/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 419/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 420/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 421/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 422/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 423/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 424/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 425/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00425: ReduceLROnPlateau reducing learning rate to 2.046362990897662e-15. 
Epoch 426/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 427/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 428/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 429/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 430/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 431/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 432/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 433/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 434/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 435/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00435: ReduceLROnPlateau reducing learning rate to 1.023181495448831e-15. 
Epoch 436/1000 168/168 [==============================] - 0s 143us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 437/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 438/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 439/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 440/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 441/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 442/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 443/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 444/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 445/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00445: ReduceLROnPlateau reducing learning rate to 5.115907477244155e-16. 
Epoch 446/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 447/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 448/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 449/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 450/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 451/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 452/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 453/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 454/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 455/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00455: ReduceLROnPlateau reducing learning rate to 2.5579537386220775e-16. 
Epoch 456/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 457/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 458/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 459/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 460/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 461/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 462/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 463/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 464/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 465/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00465: ReduceLROnPlateau reducing learning rate to 1.2789768693110388e-16. 
Epoch 466/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 467/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 468/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 469/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 470/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 471/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 472/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 473/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 474/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 475/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00475: ReduceLROnPlateau reducing learning rate to 6.394884346555194e-17. 
Epoch 476/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 477/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 478/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 479/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 480/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 481/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 482/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 483/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 484/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 485/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00485: ReduceLROnPlateau reducing learning rate to 3.197442173277597e-17. 
Epoch 486/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 487/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 488/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 489/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 490/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 491/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 492/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 493/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 494/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 495/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00495: ReduceLROnPlateau reducing learning rate to 1.5987210866387985e-17. 
Epoch 496/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 497/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 498/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 499/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 500/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 501/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 502/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 503/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 504/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 505/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00505: ReduceLROnPlateau reducing learning rate to 7.993605433193992e-18. 
Epoch 506/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 507/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 508/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 509/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 510/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 511/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 512/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 513/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 514/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 515/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00515: ReduceLROnPlateau reducing learning rate to 3.996802716596996e-18. 
Epoch 516/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 517/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 518/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 519/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 520/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 521/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 522/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 523/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 524/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 525/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00525: ReduceLROnPlateau reducing learning rate to 1.998401358298498e-18. 
Epoch 526/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 527/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 528/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 529/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 530/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 531/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 532/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 533/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 534/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 535/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00535: ReduceLROnPlateau reducing learning rate to 9.99200679149249e-19. 
Epoch 536/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 537/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 538/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 539/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 540/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 541/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 542/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 543/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 544/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 545/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00545: ReduceLROnPlateau reducing learning rate to 4.996003395746245e-19. 
Epoch 546/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 547/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 548/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 549/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 550/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 551/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 552/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 553/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 554/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 555/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00555: ReduceLROnPlateau reducing learning rate to 2.4980016978731226e-19. 
Epoch 556/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 557/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 558/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 559/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 560/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 561/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 562/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 563/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 564/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 565/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00565: ReduceLROnPlateau reducing learning rate to 1.2490008489365613e-19. 
Epoch 566/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 567/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 568/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 569/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 570/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 571/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 572/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 573/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 574/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 575/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00575: ReduceLROnPlateau reducing learning rate to 6.245004244682806e-20. 
Epoch 576/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 577/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 578/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 579/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 580/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 581/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 582/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 583/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 584/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 585/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00585: ReduceLROnPlateau reducing learning rate to 3.122502122341403e-20. 
Epoch 586/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 587/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 588/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 589/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 590/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 591/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 592/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 593/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 594/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 595/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00595: ReduceLROnPlateau reducing learning rate to 1.5612510611707016e-20. 
Epoch 596/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 597/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 598/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 599/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 600/1000 168/168 [==============================] - 0s 131us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 601/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 602/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 603/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 604/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 605/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00605: ReduceLROnPlateau reducing learning rate to 7.806255305853508e-21. 
Epoch 606/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 607/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 608/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 609/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 610/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 611/1000 168/168 [==============================] - 0s 155us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 612/1000 168/168 [==============================] - 0s 149us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 613/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 614/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 615/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00615: ReduceLROnPlateau reducing learning rate to 3.903127652926754e-21. 
Epoch 616/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 617/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 618/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 619/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 620/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 621/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 622/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 623/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 624/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 625/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00625: ReduceLROnPlateau reducing learning rate to 1.951563826463377e-21. 
Epoch 626/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 627/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 628/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 629/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 630/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 631/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 632/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 633/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 634/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 635/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00635: ReduceLROnPlateau reducing learning rate to 9.757819132316885e-22. 
Epoch 636/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 637/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 638/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 639/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 640/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 641/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 642/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 643/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 644/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 645/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00645: ReduceLROnPlateau reducing learning rate to 4.878909566158443e-22. 
Epoch 646/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 647/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 648/1000 168/168 [==============================] - ETA: 0s - loss: 0.4081 - accuracy: 0.78 - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 649/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 650/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 651/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 652/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 653/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 654/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 655/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00655: ReduceLROnPlateau reducing learning rate to 2.4394547830792213e-22. 
Epoch 656/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 657/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 658/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 659/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 660/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 661/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 662/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 663/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 664/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 665/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00665: ReduceLROnPlateau reducing learning rate to 1.2197273915396106e-22. 
Epoch 666/1000 168/168 [==============================] - 0s 143us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 667/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 668/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 669/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 670/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 671/1000 168/168 [==============================] - 0s 184us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 672/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 673/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 674/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 675/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00675: ReduceLROnPlateau reducing learning rate to 6.098636957698053e-23. 
Epoch 676/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 677/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 678/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 679/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 680/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 681/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 682/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 683/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 684/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 685/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00685: ReduceLROnPlateau reducing learning rate to 3.0493184788490266e-23. 
Epoch 686/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 687/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 688/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 689/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 690/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 691/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 692/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 693/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 694/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 695/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00695: ReduceLROnPlateau reducing learning rate to 1.5246592394245133e-23. 
Epoch 696/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 697/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 698/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 699/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 700/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 701/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 702/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 703/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 704/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 705/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00705: ReduceLROnPlateau reducing learning rate to 7.623296197122566e-24. 
Epoch 706/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 707/1000 168/168 [==============================] - ETA: 0s - loss: 0.3427 - accuracy: 0.90 - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 708/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 709/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 710/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 711/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 712/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 713/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 714/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 715/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00715: ReduceLROnPlateau reducing learning rate to 3.811648098561283e-24. 
Epoch 716/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 717/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 718/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 719/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 720/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 721/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 722/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 723/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 724/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 725/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00725: ReduceLROnPlateau reducing learning rate to 1.9058240492806416e-24. 
Epoch 726/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 727/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 728/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 729/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 730/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 731/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 732/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 733/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 734/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 735/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00735: ReduceLROnPlateau reducing learning rate to 9.529120246403208e-25. 
Epoch 736/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 737/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 738/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 739/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 740/1000 168/168 [==============================] - 0s 125us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 741/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 742/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 743/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 744/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 745/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00745: ReduceLROnPlateau reducing learning rate to 4.764560123201604e-25. 
Epoch 746/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 747/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 748/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 749/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 750/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 751/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 752/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 753/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 754/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 755/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00755: ReduceLROnPlateau reducing learning rate to 2.382280061600802e-25. 
Epoch 756/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 757/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 758/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 759/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 760/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 761/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 762/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 763/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 764/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 765/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00765: ReduceLROnPlateau reducing learning rate to 1.191140030800401e-25. 
Epoch 766/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 767/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 768/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 769/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 770/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 771/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 772/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 773/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 774/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 775/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00775: ReduceLROnPlateau reducing learning rate to 5.955700154002005e-26. 
Epoch 776/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 777/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 778/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 779/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 780/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 781/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 782/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 783/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 784/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 785/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00785: ReduceLROnPlateau reducing learning rate to 2.9778500770010025e-26. 
Epoch 786/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 787/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 788/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 789/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 790/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 791/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 792/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 793/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 794/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 795/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00795: ReduceLROnPlateau reducing learning rate to 1.4889250385005013e-26. 
Epoch 796/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 797/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 798/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 799/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 800/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 801/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 802/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 803/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 804/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 805/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00805: ReduceLROnPlateau reducing learning rate to 7.444625192502506e-27. 
Epoch 806/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 807/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 808/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 809/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 810/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 811/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 812/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 813/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 814/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 815/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00815: ReduceLROnPlateau reducing learning rate to 3.722312596251253e-27. 
Epoch 816/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 817/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 818/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 819/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 820/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 821/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 822/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 823/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 824/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 825/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00825: ReduceLROnPlateau reducing learning rate to 1.8611562981256266e-27. 
Epoch 826/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 827/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 828/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 829/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 830/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 831/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 832/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 833/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 834/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 835/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00835: ReduceLROnPlateau reducing learning rate to 9.305781490628133e-28. 
Epoch 836/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 837/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 838/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 839/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 840/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 841/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 842/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 843/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 844/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 845/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00845: ReduceLROnPlateau reducing learning rate to 4.6528907453140665e-28. 
Epoch 846/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 847/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 848/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 849/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 850/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 851/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 852/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 853/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 854/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 855/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00855: ReduceLROnPlateau reducing learning rate to 2.3264453726570332e-28. 
Epoch 856/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 857/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 858/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 859/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 860/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 861/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 862/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 863/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 864/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 865/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00865: ReduceLROnPlateau reducing learning rate to 1.1632226863285166e-28. 
Epoch 866/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 867/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 868/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 869/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 870/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 871/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 872/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 873/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 874/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 875/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00875: ReduceLROnPlateau reducing learning rate to 5.816113431642583e-29. 
Epoch 876/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 877/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 878/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 879/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 880/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 881/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 882/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 883/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 884/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 885/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00885: ReduceLROnPlateau reducing learning rate to 2.9080567158212915e-29. 
Epoch 886/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 887/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 888/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 889/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 890/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 891/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 892/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 893/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 894/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 895/1000 168/168 [==============================] - 0s 119us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00895: ReduceLROnPlateau reducing learning rate to 1.4540283579106458e-29. 
Epoch 896/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 897/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 898/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 899/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 900/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 901/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 902/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 903/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 904/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 905/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00905: ReduceLROnPlateau reducing learning rate to 7.270141789553229e-30. 
Epoch 906/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 907/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 908/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 909/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 910/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 911/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 912/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 913/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 914/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 915/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00915: ReduceLROnPlateau reducing learning rate to 3.6350708947766144e-30. 
Epoch 916/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 917/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 918/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 919/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 920/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 921/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 922/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 923/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 924/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 925/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00925: ReduceLROnPlateau reducing learning rate to 1.8175354473883072e-30. 
Epoch 926/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 927/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 928/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 929/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 930/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 931/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 932/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 933/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 934/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 935/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00935: ReduceLROnPlateau reducing learning rate to 9.087677236941536e-31. 
Epoch 936/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 937/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 938/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 939/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 940/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 941/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 942/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 943/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 944/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 945/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00945: ReduceLROnPlateau reducing learning rate to 4.543838618470768e-31. 
Epoch 946/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 947/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 948/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 949/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 950/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 951/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 952/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 953/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 954/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 955/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00955: ReduceLROnPlateau reducing learning rate to 2.271919309235384e-31. 
Epoch 956/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 957/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 958/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 959/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 960/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 961/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 962/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 963/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 964/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 965/1000 168/168 [==============================] - 0s 101us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00965: ReduceLROnPlateau reducing learning rate to 1.135959654617692e-31. 
Epoch 966/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 967/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 968/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 969/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 970/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 971/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 972/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 973/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 974/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 975/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00975: ReduceLROnPlateau reducing learning rate to 5.67979827308846e-32. 
Epoch 976/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 977/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 978/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 979/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 980/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 981/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 982/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 983/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 984/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 985/1000 168/168 [==============================] - 0s 95us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00985: ReduceLROnPlateau reducing learning rate to 2.83989913654423e-32. 
Epoch 986/1000 168/168 [==============================] - 0s 77us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 987/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 988/1000 168/168 [==============================] - 0s 113us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 989/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 990/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 991/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 992/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 993/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 994/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 995/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 00995: ReduceLROnPlateau reducing learning rate to 1.419949568272115e-32. 
Epoch 996/1000 168/168 [==============================] - 0s 83us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 997/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 998/1000 168/168 [==============================] - 0s 89us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 999/1000 168/168 [==============================] - 0s 107us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667 Epoch 1000/1000 168/168 [==============================] - 0s 232us/step - loss: 0.3740 - accuracy: 0.8274 - val_loss: 0.6292 - val_accuracy: 0.6667
# Plot the training history: accuracy and loss curves, train vs. validation.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))  # one x-value per completed epoch

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.legend()
plt.show()
range(0, 1000)
# Evaluate the trained network on the held-out test split and report the metrics.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
57/57 [==============================] - 0s 70us/step test loss: 0.6292092747855604, test accuracy: 0.6666666865348816
# Score the raw predicted probabilities against the true labels with ROC AUC.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.6737891737891738
# Binarize the predicted probabilities at the 0.5 threshold, then measure
# agreement with the true labels using Cohen's kappa.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.13842482100238673
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -1.430409 | -0.286045 | 0.490919 | 0.872836 | -0.908379 | -0.032724 | -0.630149 | -0.077256 | -0.666116 | -0.409507 | -0.808189 | -0.316827 | 1.001635 |
| 1 | -0.282157 | -1.478798 | -1.125065 | 1.134727 | 0.294401 | -1.552108 | -2.048764 | -0.332843 | -0.298952 | 1.114161 | 0.274322 | 1.796602 | 2.309045 |
| 2 | -0.419749 | 0.440468 | 0.853257 | 0.239250 | -0.398831 | -0.780274 | -0.360447 | -0.316940 | -0.976474 | 0.879549 | -0.457280 | 0.640345 | 0.645756 |
| 3 | -0.431942 | -1.517593 | 0.673149 | 0.786628 | -1.306695 | 1.366669 | 1.142564 | -2.034919 | -0.374780 | 0.475572 | -0.952521 | 0.198178 | 0.640593 |
| 4 | 0.011173 | 0.536562 | -0.966199 | -0.482951 | 0.331291 | 0.606478 | 0.539582 | -0.270964 | 0.042364 | -0.165555 | 0.123590 | 0.101357 | -0.658096 |
| 5 | 0.126251 | -0.590904 | -1.517639 | -1.072279 | 0.624234 | 1.023491 | 0.771080 | 0.598433 | -0.513511 | -0.860661 | 0.652573 | 1.694095 | 0.860230 |
| 6 | 0.830490 | -0.982750 | 2.985373 | 1.132038 | 0.506664 | 3.816969 | -0.181042 | -0.077794 | -0.404476 | -0.632689 | -0.730525 | -0.299915 | -0.619079 |
| 7 | -0.461917 | 0.208363 | 0.278132 | 2.301636 | 1.667266 | 0.690336 | 1.117026 | 1.428294 | 1.041434 | 0.404890 | -1.225161 | -1.083934 | 0.065409 |
| 8 | 0.249199 | -0.247143 | -0.916299 | 0.679460 | -0.342859 | 0.174455 | 0.125219 | 0.900110 | 0.761341 | -0.398986 | -1.063148 | 0.855251 | 2.099798 |
| 9 | 0.311144 | 1.364526 | -0.004701 | 0.405862 | -0.939024 | 0.796235 | -0.589807 | -0.084900 | 0.092344 | -0.628186 | -0.601718 | -0.966679 | 0.086483 |
| 10 | 1.334587 | 0.242352 | 0.597807 | 1.994512 | -0.050194 | 0.166573 | -0.234755 | 0.602868 | -1.535513 | -3.732625 | -0.901353 | 0.263896 | -1.060994 |
| 11 | 1.318614 | -0.797973 | 0.011175 | -0.735851 | -0.310309 | -1.472949 | -0.886459 | 0.062987 | -0.118121 | 0.096635 | 0.405075 | -1.082018 | 0.068160 |
| 12 | 0.824621 | 1.143049 | 0.195198 | -0.036161 | 0.107483 | -0.112574 | -0.229915 | -0.249936 | 0.727625 | -0.048902 | 0.611366 | 0.128240 | 0.758492 |
| 13 | -0.568472 | -0.848858 | 0.090767 | 1.154534 | -0.215999 | -2.659688 | -1.881352 | 0.412350 | -0.790718 | 0.827656 | 0.539638 | 1.593572 | 2.002635 |
| 14 | 0.406968 | 1.201176 | 1.075155 | -0.124700 | 0.135767 | -0.178145 | -0.204776 | -1.618374 | 0.193121 | 0.693318 | 1.055785 | -0.160648 | 0.537962 |
| 15 | -0.287723 | 1.515404 | 0.539186 | 0.427811 | -0.006824 | 0.845973 | 0.412855 | -0.313048 | 0.703395 | 0.284501 | -0.348641 | -0.519018 | -0.664237 |
| 16 | 0.522131 | 0.617054 | 0.277861 | -0.836147 | -1.629099 | -0.005864 | 0.191114 | 0.043306 | 0.248219 | 0.814197 | -0.101490 | -0.978501 | -0.398761 |
| 17 | -0.896889 | -1.308091 | -0.693222 | 0.501341 | -0.162478 | -0.037095 | -0.270506 | 0.346239 | 0.691093 | -0.015841 | -0.871109 | -1.169731 | 0.270587 |
| 18 | 0.875184 | 0.085665 | 0.080385 | 0.242890 | -0.938716 | -1.140718 | -0.097002 | -0.428159 | -0.490553 | -0.095430 | -0.233748 | -0.812311 | 0.895950 |
| 19 | -0.659140 | -0.549349 | -0.129098 | 1.107800 | -3.028026 | -0.963090 | 0.170241 | -2.196582 | -0.518279 | 1.312693 | -1.543068 | -0.398880 | 0.589793 |
| 20 | -0.563764 | -2.009854 | 0.074903 | -0.464555 | -1.109839 | 0.088831 | -0.566914 | 0.384785 | -0.625854 | -0.723645 | -1.000855 | 0.809858 | 0.131808 |
| 21 | 1.250269 | 0.010904 | 0.201698 | -0.957619 | -0.013951 | 1.101073 | -0.075927 | -0.502371 | -1.866004 | 0.114645 | 0.156305 | 0.629372 | 1.366339 |
| 22 | -1.190023 | -0.268956 | -0.555588 | -0.621566 | -0.230031 | 0.264870 | -0.241968 | 0.258810 | 0.329697 | 0.359684 | 0.486221 | 0.532413 | 0.397267 |
| 23 | -0.802365 | 0.152404 | -0.567015 | 0.381822 | 0.592009 | -0.562610 | -1.563042 | 0.721323 | 2.941459 | 2.101624 | 0.519023 | -0.366100 | -0.253206 |
| 24 | -0.893865 | 0.341962 | -0.830315 | 0.329795 | 0.563212 | -0.327526 | -1.488529 | 0.513889 | 2.829533 | 2.088829 | 0.636581 | -0.214855 | -0.258980 |
| 25 | -0.121562 | 1.347439 | 0.744281 | 0.698770 | -0.746534 | -0.657656 | -0.027263 | -0.447918 | 0.338675 | 1.028922 | 0.807479 | 0.808201 | -0.400533 |
| 26 | -1.615036 | 1.323285 | 0.625582 | 0.721012 | -0.691466 | -0.853918 | -0.638037 | -0.521595 | 0.602985 | 0.889454 | -0.304429 | -1.597203 | -0.132420 |
| 27 | -0.522333 | 1.534988 | -0.017521 | -0.056191 | 0.437400 | 1.304359 | 0.421225 | -0.229724 | 0.856944 | 0.363658 | -0.300702 | 0.199378 | -0.091915 |
| 28 | 0.020009 | -0.529617 | -0.687843 | -1.068170 | -0.317981 | -0.873679 | 2.545261 | 0.319549 | 0.389927 | -1.527578 | -0.515574 | -1.761353 | 0.997758 |
| 29 | 0.294921 | -0.175540 | -2.170589 | 0.206338 | 0.608432 | -2.666144 | -0.240215 | 1.450295 | 1.100392 | -1.316262 | 0.000504 | -3.910703 | -0.679615 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 195 | -0.713121 | -0.124100 | 0.184524 | -0.289193 | 1.143155 | -0.040337 | -0.456140 | 0.386906 | -0.409479 | -0.013040 | -0.448614 | 0.678472 | 0.856538 |
| 196 | 0.088904 | -0.480222 | 0.314568 | 0.011773 | -0.871197 | -0.254444 | -1.214378 | 0.074859 | 0.805154 | 0.107568 | -0.101697 | 0.725634 | -0.674412 |
| 197 | -0.819978 | -0.214030 | 0.603520 | -0.224844 | 0.008606 | 0.221715 | 0.248298 | 0.580332 | 0.548519 | 1.036227 | 0.084133 | -0.973833 | -1.118049 |
| 198 | 0.089868 | -0.642488 | -0.163873 | 0.162352 | -2.220413 | -1.138568 | -0.479671 | 2.364939 | 0.560742 | -0.314032 | 1.123286 | 0.586946 | 1.028865 |
| 199 | 0.490718 | -0.570100 | -0.601226 | -1.373455 | -2.219961 | -1.917807 | -1.874507 | 2.571947 | 0.227763 | -1.470442 | 0.797456 | 0.390489 | 0.443397 |
| 200 | -0.725311 | 0.995058 | 1.698440 | -0.409174 | 1.345314 | -0.371509 | 0.853418 | 0.950027 | 0.524004 | -0.776995 | 0.384800 | 0.564620 | 0.242242 |
| 201 | -0.088956 | 0.381477 | 0.665527 | 0.871194 | 0.436489 | 0.159443 | 0.297415 | 0.889031 | -0.725496 | -0.079337 | 0.812380 | 0.888138 | 0.315317 |
| 202 | -0.627833 | 0.625737 | 0.574533 | 0.351258 | 0.679287 | 0.045521 | -0.005976 | -0.356564 | -0.077959 | -0.043215 | 1.410034 | 0.680198 | 0.118446 |
| 203 | -0.591412 | -1.691897 | -1.280443 | 0.004164 | 0.029231 | 1.050215 | 1.339125 | 0.772476 | -0.548882 | -0.875977 | -0.739724 | -0.002660 | 0.550573 |
| 204 | -0.213359 | -0.655305 | -0.545729 | -0.525488 | -0.788705 | -0.715509 | 0.006099 | 0.426857 | 0.380788 | 0.708127 | 1.161262 | 0.582157 | 0.085247 |
| 205 | 0.561065 | -0.270819 | -0.376934 | 1.362033 | -0.711445 | 1.354311 | -0.206743 | -0.142823 | 1.553667 | -0.536125 | -1.684840 | 0.688414 | -1.009051 |
| 206 | 0.905511 | 0.711971 | 1.427482 | 0.086910 | -0.261378 | 0.561744 | 0.222402 | -1.622424 | 0.024138 | 0.934010 | -0.468465 | 0.170884 | 0.221140 |
| 207 | 0.067531 | 0.386370 | -0.307384 | -0.040460 | -0.049760 | -0.032394 | 0.431908 | 1.042755 | -1.011654 | -0.412244 | -0.108096 | 0.184730 | -0.138904 |
| 208 | -0.059758 | 0.045608 | -0.194712 | -0.038341 | -0.248229 | 0.218675 | -0.402325 | 0.773505 | 0.041528 | 0.766170 | -0.042900 | 0.536066 | 0.856860 |
| 209 | 0.486749 | -0.756717 | -0.874808 | -0.729025 | -1.528664 | 0.066851 | -0.217409 | 2.535472 | 0.810467 | 2.007519 | -1.633543 | 1.078687 | -1.214695 |
| 210 | -0.009918 | 0.023157 | -0.108599 | -0.353482 | -0.525150 | 0.026241 | -0.209046 | 0.952549 | 0.118271 | 0.761473 | -0.021901 | 0.723007 | 1.176221 |
| 211 | -0.852239 | -0.126721 | 1.768756 | -0.139569 | 1.457419 | -1.896514 | 0.722738 | -1.858343 | 1.023542 | 0.337423 | -1.696471 | 0.797698 | -1.801833 |
| 212 | -1.605282 | 0.546705 | -0.027523 | -0.007901 | 0.390982 | 0.752113 | 0.108134 | -0.532402 | -0.658558 | -0.655673 | -0.110552 | -0.038507 | 0.564082 |
| 213 | -1.537486 | 0.438542 | -0.054954 | -0.009054 | 0.565426 | 0.944990 | 0.000999 | -0.699569 | -0.616522 | -0.546167 | 0.075944 | 0.000029 | 0.772172 |
| 214 | 0.478176 | -0.623588 | -1.163628 | -0.024044 | -0.377051 | 0.114672 | -1.189664 | -0.599743 | 0.064422 | -0.284247 | 0.793914 | 0.752339 | -0.558744 |
| 215 | -0.653553 | -0.272142 | 0.596156 | 0.881373 | -2.295187 | 0.283720 | -0.193981 | -0.067370 | 0.777762 | -0.959991 | -0.275185 | 0.990175 | -2.038870 |
| 216 | -0.746791 | -0.229040 | 0.929885 | 0.869993 | -2.913181 | 0.212781 | -0.305148 | -0.195613 | 1.117807 | -0.935236 | -0.012361 | 1.696083 | -2.096406 |
| 217 | -0.402132 | 0.567649 | 0.658617 | 1.252447 | 0.282722 | -0.867962 | -0.658417 | -0.711252 | 0.412558 | 0.018922 | -0.656841 | -0.668848 | -0.606249 |
| 218 | 0.253416 | 2.028743 | -0.249389 | -0.135717 | -0.432099 | -1.127803 | 0.330577 | -0.150249 | 1.400036 | -0.237307 | -1.036935 | 0.796314 | 0.315125 |
| 219 | -0.268737 | 0.304053 | 0.442309 | 1.466913 | -0.190859 | -1.339833 | -0.412332 | -0.934782 | -0.340795 | -0.803146 | -0.913412 | -1.601519 | -0.895268 |
| 220 | 0.164485 | 1.445490 | 2.674724 | -0.663649 | -0.232015 | 1.428702 | 0.961717 | -2.600552 | -0.356496 | 0.842619 | 2.796380 | 0.923058 | 1.962832 |
| 221 | 0.702551 | 0.697481 | 0.141117 | -0.647568 | 0.265119 | 0.543185 | 0.967290 | -0.204736 | -0.388782 | -1.297479 | 1.446403 | 1.404421 | 0.191658 |
| 222 | 0.727431 | 0.078441 | -0.027658 | -0.293281 | -0.116893 | 0.762821 | -0.474665 | -0.142512 | -0.107789 | 0.767739 | -0.106817 | -0.016677 | 0.681705 |
| 223 | -1.601068 | 0.456897 | -0.776221 | -0.202831 | 0.972321 | 1.554034 | 1.293388 | 0.533103 | -0.658778 | -0.011963 | -0.965611 | 1.297730 | 2.334936 |
| 224 | -0.775289 | -1.780714 | -0.773207 | -0.130797 | -0.258296 | 0.465109 | 0.964189 | -0.054493 | -0.318554 | -0.116591 | 0.490944 | 0.263715 | 0.162778 |
225 rows × 13 columns
# Elbow method: record the within-cluster sum of squares (inertia) for
# k = 1..14 so the knee of the curve can be inspected in the plot below.
WSSs = []
for k in range(1, 15):
    # n_init=10 made explicit to match the final model and to silence
    # sklearn's FutureWarning about the changing default; same value as
    # the historical default, so the inertia values are unchanged.
    km = KMeans(n_clusters=k, random_state=0, n_init=10)
    km.fit(X)
    WSSs.append(km.inertia_)
WSSs
[2925.0, 2654.4694551642833, 2487.537140339455, 2347.3385606542174, 2241.219607532395, 2187.984558812649, 2078.2651666536785, 1992.3748205985125, 1984.6213758642439, 1904.4598519281963, 1850.0132981301954, 1816.2106987200941, 1761.4652710138919, 1722.4326028221699]
# Draw the elbow plot: WSS (inertia) against the candidate cluster counts.
plt.figure(figsize=(12, 12))
ks = range(1, 15)
plt.plot(ks, WSSs)
[<matplotlib.lines.Line2D at 0x1e82f5d1048>]
# Fit the final clustering with the elbow-chosen number of clusters.
K = 2
# Use K instead of a hard-coded 2 so changing K actually changes the model
# (previously K was assigned but never used).
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1,
0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0,
0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1,
0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0,
1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1,
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1,
0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1,
1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,
0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,
0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,
1, 1, 1, 1, 0])
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1,
0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0,
0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1,
0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0,
1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1,
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1,
0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1,
1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,
0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,
0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,
1, 1, 1, 1, 0])
# Append the cluster assignment and the ground-truth label as new columns.
# Plain column assignment is the idiomatic way to add a column and avoids the
# SettingWithCopy ambiguity that X.loc[:, col] = ... can raise when X is a
# slice of another DataFrame.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -1.430409 | -0.286045 | 0.490919 | 0.872836 | -0.908379 | -0.032724 | -0.630149 | -0.077256 | -0.666116 | -0.409507 | -0.808189 | -0.316827 | 1.001635 | 1 | 0 |
| 1 | -0.282157 | -1.478798 | -1.125065 | 1.134727 | 0.294401 | -1.552108 | -2.048764 | -0.332843 | -0.298952 | 1.114161 | 0.274322 | 1.796602 | 2.309045 | 1 | 0 |
| 2 | -0.419749 | 0.440468 | 0.853257 | 0.239250 | -0.398831 | -0.780274 | -0.360447 | -0.316940 | -0.976474 | 0.879549 | -0.457280 | 0.640345 | 0.645756 | 1 | 0 |
| 3 | -0.431942 | -1.517593 | 0.673149 | 0.786628 | -1.306695 | 1.366669 | 1.142564 | -2.034919 | -0.374780 | 0.475572 | -0.952521 | 0.198178 | 0.640593 | 1 | 0 |
| 4 | 0.011173 | 0.536562 | -0.966199 | -0.482951 | 0.331291 | 0.606478 | 0.539582 | -0.270964 | 0.042364 | -0.165555 | 0.123590 | 0.101357 | -0.658096 | 0 | 0 |
| 5 | 0.126251 | -0.590904 | -1.517639 | -1.072279 | 0.624234 | 1.023491 | 0.771080 | 0.598433 | -0.513511 | -0.860661 | 0.652573 | 1.694095 | 0.860230 | 0 | 0 |
| 6 | 0.830490 | -0.982750 | 2.985373 | 1.132038 | 0.506664 | 3.816969 | -0.181042 | -0.077794 | -0.404476 | -0.632689 | -0.730525 | -0.299915 | -0.619079 | 1 | 0 |
| 7 | -0.461917 | 0.208363 | 0.278132 | 2.301636 | 1.667266 | 0.690336 | 1.117026 | 1.428294 | 1.041434 | 0.404890 | -1.225161 | -1.083934 | 0.065409 | 1 | 0 |
| 8 | 0.249199 | -0.247143 | -0.916299 | 0.679460 | -0.342859 | 0.174455 | 0.125219 | 0.900110 | 0.761341 | -0.398986 | -1.063148 | 0.855251 | 2.099798 | 1 | 0 |
| 9 | 0.311144 | 1.364526 | -0.004701 | 0.405862 | -0.939024 | 0.796235 | -0.589807 | -0.084900 | 0.092344 | -0.628186 | -0.601718 | -0.966679 | 0.086483 | 1 | 0 |
| 10 | 1.334587 | 0.242352 | 0.597807 | 1.994512 | -0.050194 | 0.166573 | -0.234755 | 0.602868 | -1.535513 | -3.732625 | -0.901353 | 0.263896 | -1.060994 | 1 | 0 |
| 11 | 1.318614 | -0.797973 | 0.011175 | -0.735851 | -0.310309 | -1.472949 | -0.886459 | 0.062987 | -0.118121 | 0.096635 | 0.405075 | -1.082018 | 0.068160 | 0 | 0 |
| 12 | 0.824621 | 1.143049 | 0.195198 | -0.036161 | 0.107483 | -0.112574 | -0.229915 | -0.249936 | 0.727625 | -0.048902 | 0.611366 | 0.128240 | 0.758492 | 1 | 0 |
| 13 | -0.568472 | -0.848858 | 0.090767 | 1.154534 | -0.215999 | -2.659688 | -1.881352 | 0.412350 | -0.790718 | 0.827656 | 0.539638 | 1.593572 | 2.002635 | 1 | 0 |
| 14 | 0.406968 | 1.201176 | 1.075155 | -0.124700 | 0.135767 | -0.178145 | -0.204776 | -1.618374 | 0.193121 | 0.693318 | 1.055785 | -0.160648 | 0.537962 | 1 | 0 |
| 15 | -0.287723 | 1.515404 | 0.539186 | 0.427811 | -0.006824 | 0.845973 | 0.412855 | -0.313048 | 0.703395 | 0.284501 | -0.348641 | -0.519018 | -0.664237 | 1 | 0 |
| 16 | 0.522131 | 0.617054 | 0.277861 | -0.836147 | -1.629099 | -0.005864 | 0.191114 | 0.043306 | 0.248219 | 0.814197 | -0.101490 | -0.978501 | -0.398761 | 0 | 0 |
| 17 | -0.896889 | -1.308091 | -0.693222 | 0.501341 | -0.162478 | -0.037095 | -0.270506 | 0.346239 | 0.691093 | -0.015841 | -0.871109 | -1.169731 | 0.270587 | 0 | 0 |
| 18 | 0.875184 | 0.085665 | 0.080385 | 0.242890 | -0.938716 | -1.140718 | -0.097002 | -0.428159 | -0.490553 | -0.095430 | -0.233748 | -0.812311 | 0.895950 | 1 | 0 |
| 19 | -0.659140 | -0.549349 | -0.129098 | 1.107800 | -3.028026 | -0.963090 | 0.170241 | -2.196582 | -0.518279 | 1.312693 | -1.543068 | -0.398880 | 0.589793 | 1 | 0 |
| 20 | -0.563764 | -2.009854 | 0.074903 | -0.464555 | -1.109839 | 0.088831 | -0.566914 | 0.384785 | -0.625854 | -0.723645 | -1.000855 | 0.809858 | 0.131808 | 0 | 0 |
| 21 | 1.250269 | 0.010904 | 0.201698 | -0.957619 | -0.013951 | 1.101073 | -0.075927 | -0.502371 | -1.866004 | 0.114645 | 0.156305 | 0.629372 | 1.366339 | 1 | 0 |
| 22 | -1.190023 | -0.268956 | -0.555588 | -0.621566 | -0.230031 | 0.264870 | -0.241968 | 0.258810 | 0.329697 | 0.359684 | 0.486221 | 0.532413 | 0.397267 | 0 | 0 |
| 23 | -0.802365 | 0.152404 | -0.567015 | 0.381822 | 0.592009 | -0.562610 | -1.563042 | 0.721323 | 2.941459 | 2.101624 | 0.519023 | -0.366100 | -0.253206 | 0 | 0 |
| 24 | -0.893865 | 0.341962 | -0.830315 | 0.329795 | 0.563212 | -0.327526 | -1.488529 | 0.513889 | 2.829533 | 2.088829 | 0.636581 | -0.214855 | -0.258980 | 0 | 0 |
| 25 | -0.121562 | 1.347439 | 0.744281 | 0.698770 | -0.746534 | -0.657656 | -0.027263 | -0.447918 | 0.338675 | 1.028922 | 0.807479 | 0.808201 | -0.400533 | 1 | 0 |
| 26 | -1.615036 | 1.323285 | 0.625582 | 0.721012 | -0.691466 | -0.853918 | -0.638037 | -0.521595 | 0.602985 | 0.889454 | -0.304429 | -1.597203 | -0.132420 | 1 | 0 |
| 27 | -0.522333 | 1.534988 | -0.017521 | -0.056191 | 0.437400 | 1.304359 | 0.421225 | -0.229724 | 0.856944 | 0.363658 | -0.300702 | 0.199378 | -0.091915 | 1 | 0 |
| 28 | 0.020009 | -0.529617 | -0.687843 | -1.068170 | -0.317981 | -0.873679 | 2.545261 | 0.319549 | 0.389927 | -1.527578 | -0.515574 | -1.761353 | 0.997758 | 0 | 0 |
| 29 | 0.294921 | -0.175540 | -2.170589 | 0.206338 | 0.608432 | -2.666144 | -0.240215 | 1.450295 | 1.100392 | -1.316262 | 0.000504 | -3.910703 | -0.679615 | 0 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 195 | -0.713121 | -0.124100 | 0.184524 | -0.289193 | 1.143155 | -0.040337 | -0.456140 | 0.386906 | -0.409479 | -0.013040 | -0.448614 | 0.678472 | 0.856538 | 1 | 1 |
| 196 | 0.088904 | -0.480222 | 0.314568 | 0.011773 | -0.871197 | -0.254444 | -1.214378 | 0.074859 | 0.805154 | 0.107568 | -0.101697 | 0.725634 | -0.674412 | 0 | 1 |
| 197 | -0.819978 | -0.214030 | 0.603520 | -0.224844 | 0.008606 | 0.221715 | 0.248298 | 0.580332 | 0.548519 | 1.036227 | 0.084133 | -0.973833 | -1.118049 | 0 | 1 |
| 198 | 0.089868 | -0.642488 | -0.163873 | 0.162352 | -2.220413 | -1.138568 | -0.479671 | 2.364939 | 0.560742 | -0.314032 | 1.123286 | 0.586946 | 1.028865 | 0 | 1 |
| 199 | 0.490718 | -0.570100 | -0.601226 | -1.373455 | -2.219961 | -1.917807 | -1.874507 | 2.571947 | 0.227763 | -1.470442 | 0.797456 | 0.390489 | 0.443397 | 0 | 1 |
| 200 | -0.725311 | 0.995058 | 1.698440 | -0.409174 | 1.345314 | -0.371509 | 0.853418 | 0.950027 | 0.524004 | -0.776995 | 0.384800 | 0.564620 | 0.242242 | 1 | 1 |
| 201 | -0.088956 | 0.381477 | 0.665527 | 0.871194 | 0.436489 | 0.159443 | 0.297415 | 0.889031 | -0.725496 | -0.079337 | 0.812380 | 0.888138 | 0.315317 | 1 | 1 |
| 202 | -0.627833 | 0.625737 | 0.574533 | 0.351258 | 0.679287 | 0.045521 | -0.005976 | -0.356564 | -0.077959 | -0.043215 | 1.410034 | 0.680198 | 0.118446 | 1 | 1 |
| 203 | -0.591412 | -1.691897 | -1.280443 | 0.004164 | 0.029231 | 1.050215 | 1.339125 | 0.772476 | -0.548882 | -0.875977 | -0.739724 | -0.002660 | 0.550573 | 0 | 1 |
| 204 | -0.213359 | -0.655305 | -0.545729 | -0.525488 | -0.788705 | -0.715509 | 0.006099 | 0.426857 | 0.380788 | 0.708127 | 1.161262 | 0.582157 | 0.085247 | 0 | 1 |
| 205 | 0.561065 | -0.270819 | -0.376934 | 1.362033 | -0.711445 | 1.354311 | -0.206743 | -0.142823 | 1.553667 | -0.536125 | -1.684840 | 0.688414 | -1.009051 | 0 | 1 |
| 206 | 0.905511 | 0.711971 | 1.427482 | 0.086910 | -0.261378 | 0.561744 | 0.222402 | -1.622424 | 0.024138 | 0.934010 | -0.468465 | 0.170884 | 0.221140 | 1 | 1 |
| 207 | 0.067531 | 0.386370 | -0.307384 | -0.040460 | -0.049760 | -0.032394 | 0.431908 | 1.042755 | -1.011654 | -0.412244 | -0.108096 | 0.184730 | -0.138904 | 0 | 1 |
| 208 | -0.059758 | 0.045608 | -0.194712 | -0.038341 | -0.248229 | 0.218675 | -0.402325 | 0.773505 | 0.041528 | 0.766170 | -0.042900 | 0.536066 | 0.856860 | 1 | 1 |
| 209 | 0.486749 | -0.756717 | -0.874808 | -0.729025 | -1.528664 | 0.066851 | -0.217409 | 2.535472 | 0.810467 | 2.007519 | -1.633543 | 1.078687 | -1.214695 | 0 | 1 |
| 210 | -0.009918 | 0.023157 | -0.108599 | -0.353482 | -0.525150 | 0.026241 | -0.209046 | 0.952549 | 0.118271 | 0.761473 | -0.021901 | 0.723007 | 1.176221 | 1 | 1 |
| 211 | -0.852239 | -0.126721 | 1.768756 | -0.139569 | 1.457419 | -1.896514 | 0.722738 | -1.858343 | 1.023542 | 0.337423 | -1.696471 | 0.797698 | -1.801833 | 1 | 1 |
| 212 | -1.605282 | 0.546705 | -0.027523 | -0.007901 | 0.390982 | 0.752113 | 0.108134 | -0.532402 | -0.658558 | -0.655673 | -0.110552 | -0.038507 | 0.564082 | 1 | 1 |
| 213 | -1.537486 | 0.438542 | -0.054954 | -0.009054 | 0.565426 | 0.944990 | 0.000999 | -0.699569 | -0.616522 | -0.546167 | 0.075944 | 0.000029 | 0.772172 | 1 | 1 |
| 214 | 0.478176 | -0.623588 | -1.163628 | -0.024044 | -0.377051 | 0.114672 | -1.189664 | -0.599743 | 0.064422 | -0.284247 | 0.793914 | 0.752339 | -0.558744 | 0 | 1 |
| 215 | -0.653553 | -0.272142 | 0.596156 | 0.881373 | -2.295187 | 0.283720 | -0.193981 | -0.067370 | 0.777762 | -0.959991 | -0.275185 | 0.990175 | -2.038870 | 0 | 1 |
| 216 | -0.746791 | -0.229040 | 0.929885 | 0.869993 | -2.913181 | 0.212781 | -0.305148 | -0.195613 | 1.117807 | -0.935236 | -0.012361 | 1.696083 | -2.096406 | 0 | 1 |
| 217 | -0.402132 | 0.567649 | 0.658617 | 1.252447 | 0.282722 | -0.867962 | -0.658417 | -0.711252 | 0.412558 | 0.018922 | -0.656841 | -0.668848 | -0.606249 | 1 | 1 |
| 218 | 0.253416 | 2.028743 | -0.249389 | -0.135717 | -0.432099 | -1.127803 | 0.330577 | -0.150249 | 1.400036 | -0.237307 | -1.036935 | 0.796314 | 0.315125 | 1 | 1 |
| 219 | -0.268737 | 0.304053 | 0.442309 | 1.466913 | -0.190859 | -1.339833 | -0.412332 | -0.934782 | -0.340795 | -0.803146 | -0.913412 | -1.601519 | -0.895268 | 1 | 1 |
| 220 | 0.164485 | 1.445490 | 2.674724 | -0.663649 | -0.232015 | 1.428702 | 0.961717 | -2.600552 | -0.356496 | 0.842619 | 2.796380 | 0.923058 | 1.962832 | 1 | 1 |
| 221 | 0.702551 | 0.697481 | 0.141117 | -0.647568 | 0.265119 | 0.543185 | 0.967290 | -0.204736 | -0.388782 | -1.297479 | 1.446403 | 1.404421 | 0.191658 | 1 | 1 |
| 222 | 0.727431 | 0.078441 | -0.027658 | -0.293281 | -0.116893 | 0.762821 | -0.474665 | -0.142512 | -0.107789 | 0.767739 | -0.106817 | -0.016677 | 0.681705 | 1 | 1 |
| 223 | -1.601068 | 0.456897 | -0.776221 | -0.202831 | 0.972321 | 1.554034 | 1.293388 | 0.533103 | -0.658778 | -0.011963 | -0.965611 | 1.297730 | 2.334936 | 1 | 1 |
| 224 | -0.775289 | -1.780714 | -0.773207 | -0.130797 | -0.258296 | 0.465109 | 0.964189 | -0.054493 | -0.318554 | -0.116591 | 0.490944 | 0.263715 | 0.162778 | 0 | 1 |
225 rows × 15 columns
# Count observations per (chosen, Cluster) pair, reshape so each cluster row
# has one column per 'chosen' value, then draw a stacked bar chart.
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df[[0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82f5fadd8>
# Inspect the full column list to locate the tonal-centroid feature positions.
df_n_ps_std[0].columns
Index(['durationfiles', 'rmsfiles', 'rmsmedianfiles', 'lowenergyfiles',
'ASRfiles', 'beatspectrumfiles', 'eventdensityfiles', 'tempofiles',
'pulseclarityfiles', 'zerocrossfiles', 'rolloffsfiles',
'brightnessfiles', 'spreadfiles', 'centroidfiles', 'kurtosisfiles',
'flatnessfiles', 'entropyfiles', 'mfccfiles_1', 'mfccfiles_2',
'mfccfiles_3', 'mfccfiles_4', 'mfccfiles_5', 'mfccfiles_6',
'mfccfiles_7', 'mfccfiles_8', 'mfccfiles_9', 'mfccfiles_10',
'mfccfiles_11', 'mfccfiles_12', 'mfccfiles_13', 'inharmonicityfiles',
'bestkeyfiles', 'keyclarityfiles', 'modalityfiles',
'tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6',
'chromagramfiles_1', 'chromagramfiles_2', 'chromagramfiles_3',
'chromagramfiles_4', 'chromagramfiles_5', 'chromagramfiles_6',
'chromagramfiles_7', 'chromagramfiles_8', 'chromagramfiles_9',
'chromagramfiles_10', 'chromagramfiles_11', 'chromagramfiles_12',
'attackslopefiles', 'attackleapfiles', 'chosen'],
dtype='object')
# Confirm that positions 34:40 are exactly the six tonal-centroid columns.
df_n_ps_std[0].columns[34:40]
Index(['tonalcentroidfiles_1', 'tonalcentroidfiles_2', 'tonalcentroidfiles_3',
'tonalcentroidfiles_4', 'tonalcentroidfiles_5', 'tonalcentroidfiles_6'],
dtype='object')
# For each company, keep only the six tonal-centroid columns (positions 34:40)
# of its standardised feature frame.
df_n_ps_std_tc = [
    pd.DataFrame(df_n_ps_std[k].iloc[:, 34:40]) for k in range(len(companies))
]
for k in range(len(companies)):
    df_n_ps_std_tc[k].columns = df_n_ps_std[k].columns[34:40]
# Sanity check: expect six non-null float64 tonal-centroid columns.
df_n_ps_std_tc[0].info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 372 entries, 0 to 371 Data columns (total 6 columns): tonalcentroidfiles_1 372 non-null float64 tonalcentroidfiles_2 372 non-null float64 tonalcentroidfiles_3 372 non-null float64 tonalcentroidfiles_4 372 non-null float64 tonalcentroidfiles_5 372 non-null float64 tonalcentroidfiles_6 372 non-null float64 dtypes: float64(6) memory usage: 17.5 KB
# Features: the six standardised tonal-centroid columns of the first company.
# Target: its binary 'chosen' label, taken from the unstandardised frame.
X = df_n_ps_std_tc[0]
y = df_n_ps[0]['chosen']
# NOTE(review): no random_state is passed, so the (default 75/25) split is not
# reproducible across notebook runs — consider fixing the seed.
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(279, 6)
# Starting estimator for the grid search; its architecture and training
# settings are overridden by the parameter grid below.
mlp = MLPClassifier(hidden_layer_sizes=(30, 30, 30))

# Candidate values for each hyper-parameter explored by GridSearchCV.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005,
                          0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time()  # Current time in seconds since Jan 1st, 1970 (reference point)
np.random.seed(1234)
# Hyper-parameter grid; batch_size is deliberately left out to bound runtime.
parametros = {'activation': activation_vec,
              'max_iter':max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
             }
# Track both Cohen's kappa and plain accuracy; refit the best model on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): `iid` was deprecated in scikit-learn 0.22 and removed in 0.24;
# this call will break on a modern sklearn — drop the argument when upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # Time after model training finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.006, 'max_iter': 300}, que permiten obtener un Accuracy de 82.08% y un Kappa del 43.49
Tiempo total: 29.12 minutos
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\neural_network\multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (300) reached and the optimization hasn't converged yet. % self.max_iter, ConvergenceWarning)
# Rebuild the best MLP found by the grid search as a Keras functional model.
n0 = X_train.shape[1]

### hidden_layer_sizes
# Layer widths: the tuned hidden sizes followed by a single output unit.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)

lr = 0.006    # best learning_rate_init from the grid search
epochs = 300  # best max_iter from the grid search

input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
# Chain one tanh Dense layer per hidden width, each fed by the previous output.
for width in ns[:-1]:
    hidden_outputs.append(Dense(width, activation='tanh')(hidden_outputs[-1]))
# Single sigmoid unit for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])

model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot of the freshly initialised weights
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_3 (Dense) (None, 20) 280 _________________________________________________________________ dense_4 (Dense) (None, 1) 21 ================================================================= Total params: 301 Trainable params: 301 Non-trainable params: 0 _________________________________________________________________
# Restore the snapshotted initial weights so training starts from a known state.
model.set_weights(weights)
# NOTE(review): `lr` is a deprecated alias of `learning_rate` in newer Keras.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train for the grid-search-selected number of epochs; halve the learning rate
# whenever validation accuracy fails to improve by >= 0.01 over 10 epochs.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                    )
Train on 279 samples, validate on 93 samples Epoch 1/300 279/279 [==============================] - 1s 3ms/step - loss: 0.7063 - accuracy: 0.5197 - val_loss: 0.6660 - val_accuracy: 0.5914 Epoch 2/300 279/279 [==============================] - 0s 61us/step - loss: 0.6003 - accuracy: 0.6918 - val_loss: 0.5741 - val_accuracy: 0.7419 Epoch 3/300 279/279 [==============================] - 0s 68us/step - loss: 0.5317 - accuracy: 0.7348 - val_loss: 0.5278 - val_accuracy: 0.7527 Epoch 4/300 279/279 [==============================] - 0s 72us/step - loss: 0.4996 - accuracy: 0.7634 - val_loss: 0.5053 - val_accuracy: 0.7742 Epoch 5/300 279/279 [==============================] - 0s 75us/step - loss: 0.4770 - accuracy: 0.7778 - val_loss: 0.4938 - val_accuracy: 0.8065 Epoch 6/300 279/279 [==============================] - 0s 90us/step - loss: 0.4639 - accuracy: 0.7778 - val_loss: 0.4845 - val_accuracy: 0.7957 Epoch 7/300 279/279 [==============================] - 0s 82us/step - loss: 0.4526 - accuracy: 0.7849 - val_loss: 0.4813 - val_accuracy: 0.8172 Epoch 8/300 279/279 [==============================] - 0s 75us/step - loss: 0.4466 - accuracy: 0.7921 - val_loss: 0.4822 - val_accuracy: 0.8065 Epoch 9/300 279/279 [==============================] - 0s 79us/step - loss: 0.4412 - accuracy: 0.7993 - val_loss: 0.4819 - val_accuracy: 0.8172 Epoch 10/300 279/279 [==============================] - 0s 82us/step - loss: 0.4373 - accuracy: 0.8065 - val_loss: 0.4840 - val_accuracy: 0.7957 Epoch 11/300 279/279 [==============================] - 0s 86us/step - loss: 0.4343 - accuracy: 0.8136 - val_loss: 0.4827 - val_accuracy: 0.7957 Epoch 12/300 279/279 [==============================] - 0s 75us/step - loss: 0.4328 - accuracy: 0.8100 - val_loss: 0.4866 - val_accuracy: 0.7849 Epoch 13/300 279/279 [==============================] - 0s 82us/step - loss: 0.4225 - accuracy: 0.8136 - val_loss: 0.4860 - val_accuracy: 0.7957 Epoch 14/300 279/279 [==============================] - 0s 75us/step - loss: 
0.4186 - accuracy: 0.8136 - val_loss: 0.4865 - val_accuracy: 0.7849 Epoch 15/300 279/279 [==============================] - 0s 82us/step - loss: 0.4118 - accuracy: 0.8136 - val_loss: 0.4846 - val_accuracy: 0.7849 Epoch 16/300 279/279 [==============================] - 0s 79us/step - loss: 0.4080 - accuracy: 0.8208 - val_loss: 0.4901 - val_accuracy: 0.7849 Epoch 17/300 279/279 [==============================] - 0s 107us/step - loss: 0.4009 - accuracy: 0.8351 - val_loss: 0.4878 - val_accuracy: 0.7742 Epoch 00017: ReduceLROnPlateau reducing learning rate to 0.003000000026077032. Epoch 18/300 279/279 [==============================] - 0s 107us/step - loss: 0.3950 - accuracy: 0.8387 - val_loss: 0.4864 - val_accuracy: 0.7742 Epoch 19/300 279/279 [==============================] - 0s 68us/step - loss: 0.3922 - accuracy: 0.8387 - val_loss: 0.4852 - val_accuracy: 0.7742 Epoch 20/300 279/279 [==============================] - 0s 86us/step - loss: 0.3886 - accuracy: 0.8530 - val_loss: 0.4807 - val_accuracy: 0.7849 Epoch 21/300 279/279 [==============================] - 0s 64us/step - loss: 0.3865 - accuracy: 0.8566 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 22/300 279/279 [==============================] - 0s 82us/step - loss: 0.3828 - accuracy: 0.8530 - val_loss: 0.4848 - val_accuracy: 0.7849 Epoch 23/300 279/279 [==============================] - 0s 79us/step - loss: 0.3800 - accuracy: 0.8566 - val_loss: 0.4855 - val_accuracy: 0.7849 Epoch 24/300 279/279 [==============================] - 0s 68us/step - loss: 0.3760 - accuracy: 0.8638 - val_loss: 0.4818 - val_accuracy: 0.7849 Epoch 25/300 279/279 [==============================] - 0s 79us/step - loss: 0.3744 - accuracy: 0.8602 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 26/300 279/279 [==============================] - 0s 86us/step - loss: 0.3705 - accuracy: 0.8710 - val_loss: 0.4781 - val_accuracy: 0.7849 Epoch 27/300 279/279 [==============================] - 0s 86us/step - loss: 0.3666 - accuracy: 0.8746 - 
val_loss: 0.4785 - val_accuracy: 0.7849 Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. Epoch 28/300 279/279 [==============================] - 0s 79us/step - loss: 0.3637 - accuracy: 0.8746 - val_loss: 0.4799 - val_accuracy: 0.7849 Epoch 29/300 279/279 [==============================] - 0s 75us/step - loss: 0.3620 - accuracy: 0.8746 - val_loss: 0.4816 - val_accuracy: 0.7849 Epoch 30/300 279/279 [==============================] - 0s 79us/step - loss: 0.3604 - accuracy: 0.8746 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 31/300 279/279 [==============================] - 0s 75us/step - loss: 0.3596 - accuracy: 0.8746 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 32/300 279/279 [==============================] - 0s 79us/step - loss: 0.3572 - accuracy: 0.8781 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 33/300 279/279 [==============================] - 0s 79us/step - loss: 0.3555 - accuracy: 0.8781 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 34/300 279/279 [==============================] - 0s 79us/step - loss: 0.3540 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 35/300 279/279 [==============================] - 0s 82us/step - loss: 0.3527 - accuracy: 0.8817 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 36/300 279/279 [==============================] - 0s 75us/step - loss: 0.3510 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 37/300 279/279 [==============================] - 0s 104us/step - loss: 0.3493 - accuracy: 0.8781 - val_loss: 0.4836 - val_accuracy: 0.7849 Epoch 00037: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 38/300 279/279 [==============================] - 0s 86us/step - loss: 0.3476 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 39/300 279/279 [==============================] - 0s 82us/step - loss: 0.3469 - accuracy: 0.8817 - val_loss: 0.4823 - val_accuracy: 0.7849 Epoch 40/300 279/279 [==============================] - 0s 75us/step - loss: 0.3457 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 41/300 279/279 [==============================] - 0s 82us/step - loss: 0.3449 - accuracy: 0.8817 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 42/300 279/279 [==============================] - 0s 90us/step - loss: 0.3443 - accuracy: 0.8817 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 43/300 279/279 [==============================] - 0s 104us/step - loss: 0.3434 - accuracy: 0.8817 - val_loss: 0.4833 - val_accuracy: 0.7849 Epoch 44/300 279/279 [==============================] - 0s 93us/step - loss: 0.3427 - accuracy: 0.8817 - val_loss: 0.4835 - val_accuracy: 0.7849 Epoch 45/300 279/279 [==============================] - 0s 90us/step - loss: 0.3418 - accuracy: 0.8817 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 46/300 279/279 [==============================] - 0s 93us/step - loss: 0.3411 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 47/300 279/279 [==============================] - 0s 97us/step - loss: 0.3403 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 00047: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 48/300 279/279 [==============================] - 0s 111us/step - loss: 0.3394 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 49/300 279/279 [==============================] - 0s 100us/step - loss: 0.3389 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 50/300 279/279 [==============================] - 0s 111us/step - loss: 0.3386 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 51/300 279/279 [==============================] - 0s 115us/step - loss: 0.3381 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 52/300 279/279 [==============================] - 0s 136us/step - loss: 0.3378 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 53/300 279/279 [==============================] - 0s 118us/step - loss: 0.3373 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 54/300 279/279 [==============================] - 0s 86us/step - loss: 0.3370 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 55/300 279/279 [==============================] - 0s 115us/step - loss: 0.3365 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 56/300 279/279 [==============================] - 0s 107us/step - loss: 0.3362 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 57/300 279/279 [==============================] - 0s 122us/step - loss: 0.3358 - accuracy: 0.8853 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 00057: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 58/300 279/279 [==============================] - 0s 104us/step - loss: 0.3353 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 59/300 279/279 [==============================] - 0s 107us/step - loss: 0.3351 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 60/300 279/279 [==============================] - 0s 104us/step - loss: 0.3349 - accuracy: 0.8853 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 61/300 279/279 [==============================] - 0s 104us/step - loss: 0.3347 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 62/300 279/279 [==============================] - 0s 100us/step - loss: 0.3345 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 63/300 279/279 [==============================] - 0s 79us/step - loss: 0.3343 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 64/300 279/279 [==============================] - 0s 90us/step - loss: 0.3341 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 65/300 279/279 [==============================] - 0s 93us/step - loss: 0.3339 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 66/300 279/279 [==============================] - 0s 104us/step - loss: 0.3337 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 67/300 279/279 [==============================] - 0s 79us/step - loss: 0.3335 - accuracy: 0.8889 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 00067: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 68/300 279/279 [==============================] - 0s 100us/step - loss: 0.3333 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 69/300 279/279 [==============================] - 0s 104us/step - loss: 0.3332 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 70/300 279/279 [==============================] - 0s 90us/step - loss: 0.3331 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 71/300 279/279 [==============================] - 0s 100us/step - loss: 0.3330 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 72/300 279/279 [==============================] - 0s 107us/step - loss: 0.3329 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 73/300 279/279 [==============================] - 0s 104us/step - loss: 0.3328 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 74/300 279/279 [==============================] - 0s 118us/step - loss: 0.3327 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 75/300 279/279 [==============================] - 0s 100us/step - loss: 0.3326 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 76/300 279/279 [==============================] - 0s 90us/step - loss: 0.3325 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 77/300 279/279 [==============================] - 0s 72us/step - loss: 0.3324 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00077: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 78/300 279/279 [==============================] - 0s 111us/step - loss: 0.3323 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 79/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 80/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 81/300 279/279 [==============================] - 0s 97us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 82/300 279/279 [==============================] - 0s 104us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 83/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 84/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 85/300 279/279 [==============================] - 0s 111us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 86/300 279/279 [==============================] - 0s 93us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 87/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00087: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. 
Epoch 88/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 89/300 279/279 [==============================] - 0s 104us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 90/300 279/279 [==============================] - 0s 100us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 91/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 92/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 93/300 279/279 [==============================] - 0s 93us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 94/300 279/279 [==============================] - 0s 100us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 95/300 279/279 [==============================] - 0s 90us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 96/300 279/279 [==============================] - 0s 97us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 97/300 279/279 [==============================] - 0s 100us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00097: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05. 
Epoch 98/300 279/279 [==============================] - 0s 97us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 99/300 279/279 [==============================] - ETA: 0s - loss: 0.3474 - accuracy: 0.81 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 100/300 279/279 [==============================] - ETA: 0s - loss: 0.2551 - accuracy: 0.93 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 101/300 279/279 [==============================] - 0s 107us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 102/300 279/279 [==============================] - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 103/300 279/279 [==============================] - 0s 86us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 104/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 105/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 106/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 107/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00107: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06. 
Epoch 108/300 279/279 [==============================] - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 109/300 279/279 [==============================] - ETA: 0s - loss: 0.2823 - accuracy: 0.90 - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 110/300 279/279 [==============================] - 0s 90us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 111/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 112/300 279/279 [==============================] - 0s 107us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 113/300 279/279 [==============================] - 0s 86us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 114/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 115/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 116/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 117/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00117: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06. 
Epoch 118/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 119/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 120/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 121/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 122/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 123/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 124/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 125/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 126/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 127/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00127: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06. 
Epoch 128/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 129/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 130/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 131/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 132/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 133/300 279/279 [==============================] - 0s 125us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 134/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 135/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 136/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 137/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00137: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07. 
Epoch 138/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 139/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 140/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 141/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 142/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 143/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 144/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 145/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 146/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 147/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00147: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07. 
Epoch 148/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 149/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 150/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 151/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 152/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 153/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 154/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 155/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 156/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 157/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00157: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07. 
Epoch 158/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 159/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 160/300 279/279 [==============================] - 0s 133us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 161/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 162/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 163/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 164/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 165/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 166/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 167/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00167: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08. 
Epoch 168/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 169/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 170/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 171/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 172/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 173/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 174/300 279/279 [==============================] - 0s 168us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 175/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 176/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 177/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00177: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08. 
Epoch 178/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 179/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 180/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 181/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 182/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 183/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 184/300 279/279 [==============================] - ETA: 0s - loss: 0.2749 - accuracy: 0.96 - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 185/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 186/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 187/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00187: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08. 
Epoch 188/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 189/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 190/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 191/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 192/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 193/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 194/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 195/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 196/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 197/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00197: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08. 
Epoch 198/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 199/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 200/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 201/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 202/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 203/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 204/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 205/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 206/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 207/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00207: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09. 
Epoch 208/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 209/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 210/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 211/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 212/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 213/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 214/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 215/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 216/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 217/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00217: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09. 
Epoch 218/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 219/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 220/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 221/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 222/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 223/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 224/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 225/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 226/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 227/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00227: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09. 
Epoch 228/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 229/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 230/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 231/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 232/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 233/300 279/279 [==============================] - 0s 122us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 234/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 235/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 236/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 237/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00237: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10. 
Epoch 238/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 239/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 240/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 241/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 242/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 243/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 244/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 245/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 246/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 247/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00247: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10. 
Epoch 248/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 249/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 250/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 251/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 252/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 253/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 254/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 255/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 256/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 257/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00257: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10. 
Epoch 258/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 259/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 260/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 261/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 262/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 263/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 264/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 265/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 266/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 267/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00267: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11. 
Epoch 268/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 269/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 270/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 271/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 272/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 273/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 274/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 275/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 276/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 277/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00277: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11. 
Epoch 278/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 279/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 280/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 281/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 282/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 283/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 284/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 285/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 286/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 287/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00287: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11. 
Epoch 288/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 289/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 290/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 291/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 292/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 293/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 294/300 279/279 [==============================] - 0s 118us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 295/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 296/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 297/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00297: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11. Epoch 298/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 299/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 300/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849
# Pull the per-epoch accuracy/loss curves out of the Keras History object
# and plot training vs. validation for each metric.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))
print(epochs)

def _plot_pair(train_vals, val_vals, train_label, val_label, title):
    # One figure per metric: dots for training, solid line for validation.
    plt.plot(epochs, train_vals, 'bo', label=train_label)
    plt.plot(epochs, val_vals, 'b', label=val_label)
    plt.title(title)
    plt.legend()
    plt.show()

_plot_pair(acc, val_acc, 'Training acc', 'Validation acc',
           'Training and validation accuracy')
_plot_pair(loss, val_loss, 'Training loss', 'Validation loss',
           'Training and validation loss')
range(0, 300)
# Evaluate the trained model on the held-out test split and report both metrics.
test_loss, test_acc = model.evaluate(X_test, y_test)
summary = "test loss: {}, test accuracy: {}".format(test_loss, test_acc)
print(summary)
93/93 [==============================] - 0s 54us/step test loss: 0.48277782432494626, test accuracy: 0.7849462628364563
# Predict probabilities on the test set, binarize them at the 0.5 threshold,
# and report chance-corrected agreement (Cohen's kappa) plus AUC-ROC.
y_pred = model.predict(X_test)
# Idiomatic list comprehension instead of list(map(lambda ...)):
# int(p >= 0.5) maps each predicted probability to a hard 0/1 label.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
print("AUC ROC: ", roc_auc_score(y_test, y_pred))
0.34275618374558303
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 |
372 rows × 13 columns
# Elbow method: within-cluster sum of squares (KMeans inertia) for k = 1..14.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
[4836.0, 4367.616182778379, 3974.0733975762073, 3720.4477504020774, 3549.9823424876267, 3386.093163495573, 3254.3815559758523, 3136.90996141146, 3048.6934734702136, 2957.186911982338, 2855.333306370868, 2802.0220333671496, 2712.5965714921504, 2657.189981994876]
# Plot the elbow curve: inertia against the candidate number of clusters.
fig = plt.figure(figsize=(12, 12))
ks = list(range(1, 15))
plt.plot(ks, WSSs)
[<matplotlib.lines.Line2D at 0x244598a8630>]
# Number of clusters chosen from the elbow plot above.
K = 3
# Fix: use the K constant instead of repeating the literal 3, so changing
# K in one place updates the model as well.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# Cluster id for each track. Predicting on the same X the model was fitted
# on reproduces kmeans_mfcc.labels_ (the two arrays shown above are identical).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# Attach the cluster id and the original 'chosen' label to the feature
# table so clusters can be compared against playlist membership.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 | 1 | 0 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 | 1 | 0 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 | 0 | 0 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 | 0 | 0 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 | 0 | 0 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 | 0 | 0 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 | 0 | 0 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 | 1 | 0 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 | 0 | 0 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 | 1 | 0 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 | 0 | 0 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 | 1 | 0 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 | 0 | 0 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 | 0 | 0 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 | 0 | 0 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 | 0 | 0 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 | 1 | 0 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 | 0 | 0 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 | 0 | 0 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 | 1 | 0 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 | 0 | 0 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 | 0 | 0 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 | 0 | 0 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 | 1 | 0 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 | 1 | 0 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 | 1 | 0 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 | 1 | 0 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 | 1 | 0 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 | 1 | 0 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 | 1 | 1 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 | 1 | 1 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 | 1 | 1 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 | 0 | 1 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 | 0 | 1 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 | 0 | 1 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 | 1 | 1 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 | 1 | 1 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 | 1 | 1 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 | 1 | 1 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 | 1 | 1 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 | 0 | 1 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 | 1 | 1 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 | 1 | 1 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 | 1 | 1 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 | 1 | 1 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 | 1 | 1 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 | 1 | 1 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 | 1 | 1 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 | 1 | 1 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 | 1 | 1 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 | 2 | 1 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 | 2 | 1 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 | 1 | 1 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 | 1 | 1 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 | 1 | 1 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 | 1 | 1 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 | 1 | 1 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 | 1 | 1 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 | 0 | 1 |
372 rows × 15 columns
# Count tracks per (chosen, Cluster) pair and draw a stacked bar chart of
# cluster composition, split by whether the track was chosen (0 / 1).
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
# Column 0 is the unnamed size column produced by reset_index().
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x2445d307358>
from IPython.display import display, Markdown, Latex
# Render a markdown section header with the company name.
# NOTE(review): the header shows companies[0] but the data below is taken
# from index 1 of df_n_ps_std_mfcc / df_n_ps — confirm both refer to the
# same company, otherwise the heading is mislabeled.
display(Markdown('## '+companies[0]))
# X: standardized MFCC features; y: binary 'chosen' target for this playlist.
X = df_n_ps_std_mfcc[1]
y = df_n_ps[1]['chosen']
# Hold out a test split. Fix: pin random_state so the split (and every
# result derived from it) is reproducible, consistent with the fixed seeds
# used elsewhere in this notebook (KMeans random_state=0, np.random.seed).
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
X_train.shape
(279, 13)
# Base MLP; hidden_layer_sizes here is only a placeholder — the grid
# search below overrides it along with the other tuned hyperparameters.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate values for the hyperparameter grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Defined but excluded from the grid below (the 'batch_size' entry is
# commented out); kept for a possible future search over batch size.
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time() # Current time in seconds since Jan 1st 1970 (the epoch, used as reference point)
np.random.seed(1234)
# Hyperparameter grid assembled from the candidate vectors defined above.
parametros = {'activation': activation_vec,
'max_iter':max_iter_vec,
'hidden_layer_sizes': hidden_layer_sizes_vec,
'learning_rate_init': learning_rate_init_vec#,
#'batch_size': batch_size_vec
}
# Track both Cohen's kappa and accuracy; the best model is refit on accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): iid=True is deprecated and was removed in scikit-learn 0.24;
# this cell will fail on modern sklearn. Dropping it changes how fold scores
# are averaged, so verify results before removing.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
# Report best params, best accuracy and the kappa of the best model (message text is in Spanish).
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time() # Time measured after the model finished training
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'tanh', 'hidden_layer_sizes': (20,), 'learning_rate_init': 0.006, 'max_iter': 300}, que permiten obtener un Accuracy de 82.08% y un Kappa del 43.49
Tiempo total: 29.12 minutos
C:\ProgramData\Anaconda3\lib\site-packages\sklearn\neural_network\multilayer_perceptron.py:564: ConvergenceWarning: Stochastic Optimizer: Maximum iterations (300) reached and the optimization hasn't converged yet. % self.max_iter, ConvergenceWarning)
# Number of input features for the Keras model.
n0 = X_train.shape[1]
### hidden_layer_sizes
# Layer widths: the hidden sizes that won the grid search, plus a single
# output unit. Fix: build the list idiomatically instead of an index loop.
ns = list(grid.best_params_['hidden_layer_sizes']) + [1]
# Fix: reuse the winning hyperparameters instead of hard-coding the values
# they happened to take (0.006 and 300 in the reported run), so a rerun of
# the grid search keeps this cell consistent automatically.
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
# Rebuild the best MLP architecture with the Keras functional API:
# n0 inputs, the tuned tanh hidden layers, one sigmoid output unit.
input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
for units in ns[:-1]:
    # Stack each hidden layer on top of the previous layer's output.
    hidden_outputs.append(Dense(units, activation='tanh')(hidden_outputs[-1]))
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly initialized weights so training can later be
# restarted from this exact starting point.
weights = model.get_weights()
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_3 (Dense) (None, 20) 280 _________________________________________________________________ dense_4 (Dense) (None, 1) 21 ================================================================= Total params: 301 Trainable params: 301 Non-trainable params: 0 _________________________________________________________________
# Reset to the saved initial weights so training starts from scratch.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Halve the learning rate whenever validation accuracy stops improving
# (by at least 0.01) for 10 consecutive epochs.
reduce_lr = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
history = model.fit(
    X_train, y_train,
    epochs=epochs,
    validation_data=(X_test, y_test),
    batch_size=32,
    callbacks=[reduce_lr],
)
Train on 279 samples, validate on 93 samples Epoch 1/300 279/279 [==============================] - 1s 3ms/step - loss: 0.7063 - accuracy: 0.5197 - val_loss: 0.6660 - val_accuracy: 0.5914 Epoch 2/300 279/279 [==============================] - 0s 61us/step - loss: 0.6003 - accuracy: 0.6918 - val_loss: 0.5741 - val_accuracy: 0.7419 Epoch 3/300 279/279 [==============================] - 0s 68us/step - loss: 0.5317 - accuracy: 0.7348 - val_loss: 0.5278 - val_accuracy: 0.7527 Epoch 4/300 279/279 [==============================] - 0s 72us/step - loss: 0.4996 - accuracy: 0.7634 - val_loss: 0.5053 - val_accuracy: 0.7742 Epoch 5/300 279/279 [==============================] - 0s 75us/step - loss: 0.4770 - accuracy: 0.7778 - val_loss: 0.4938 - val_accuracy: 0.8065 Epoch 6/300 279/279 [==============================] - 0s 90us/step - loss: 0.4639 - accuracy: 0.7778 - val_loss: 0.4845 - val_accuracy: 0.7957 Epoch 7/300 279/279 [==============================] - 0s 82us/step - loss: 0.4526 - accuracy: 0.7849 - val_loss: 0.4813 - val_accuracy: 0.8172 Epoch 8/300 279/279 [==============================] - 0s 75us/step - loss: 0.4466 - accuracy: 0.7921 - val_loss: 0.4822 - val_accuracy: 0.8065 Epoch 9/300 279/279 [==============================] - 0s 79us/step - loss: 0.4412 - accuracy: 0.7993 - val_loss: 0.4819 - val_accuracy: 0.8172 Epoch 10/300 279/279 [==============================] - 0s 82us/step - loss: 0.4373 - accuracy: 0.8065 - val_loss: 0.4840 - val_accuracy: 0.7957 Epoch 11/300 279/279 [==============================] - 0s 86us/step - loss: 0.4343 - accuracy: 0.8136 - val_loss: 0.4827 - val_accuracy: 0.7957 Epoch 12/300 279/279 [==============================] - 0s 75us/step - loss: 0.4328 - accuracy: 0.8100 - val_loss: 0.4866 - val_accuracy: 0.7849 Epoch 13/300 279/279 [==============================] - 0s 82us/step - loss: 0.4225 - accuracy: 0.8136 - val_loss: 0.4860 - val_accuracy: 0.7957 Epoch 14/300 279/279 [==============================] - 0s 75us/step - loss: 
0.4186 - accuracy: 0.8136 - val_loss: 0.4865 - val_accuracy: 0.7849 Epoch 15/300 279/279 [==============================] - 0s 82us/step - loss: 0.4118 - accuracy: 0.8136 - val_loss: 0.4846 - val_accuracy: 0.7849 Epoch 16/300 279/279 [==============================] - 0s 79us/step - loss: 0.4080 - accuracy: 0.8208 - val_loss: 0.4901 - val_accuracy: 0.7849 Epoch 17/300 279/279 [==============================] - 0s 107us/step - loss: 0.4009 - accuracy: 0.8351 - val_loss: 0.4878 - val_accuracy: 0.7742 Epoch 00017: ReduceLROnPlateau reducing learning rate to 0.003000000026077032. Epoch 18/300 279/279 [==============================] - 0s 107us/step - loss: 0.3950 - accuracy: 0.8387 - val_loss: 0.4864 - val_accuracy: 0.7742 Epoch 19/300 279/279 [==============================] - 0s 68us/step - loss: 0.3922 - accuracy: 0.8387 - val_loss: 0.4852 - val_accuracy: 0.7742 Epoch 20/300 279/279 [==============================] - 0s 86us/step - loss: 0.3886 - accuracy: 0.8530 - val_loss: 0.4807 - val_accuracy: 0.7849 Epoch 21/300 279/279 [==============================] - 0s 64us/step - loss: 0.3865 - accuracy: 0.8566 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 22/300 279/279 [==============================] - 0s 82us/step - loss: 0.3828 - accuracy: 0.8530 - val_loss: 0.4848 - val_accuracy: 0.7849 Epoch 23/300 279/279 [==============================] - 0s 79us/step - loss: 0.3800 - accuracy: 0.8566 - val_loss: 0.4855 - val_accuracy: 0.7849 Epoch 24/300 279/279 [==============================] - 0s 68us/step - loss: 0.3760 - accuracy: 0.8638 - val_loss: 0.4818 - val_accuracy: 0.7849 Epoch 25/300 279/279 [==============================] - 0s 79us/step - loss: 0.3744 - accuracy: 0.8602 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 26/300 279/279 [==============================] - 0s 86us/step - loss: 0.3705 - accuracy: 0.8710 - val_loss: 0.4781 - val_accuracy: 0.7849 Epoch 27/300 279/279 [==============================] - 0s 86us/step - loss: 0.3666 - accuracy: 0.8746 - 
val_loss: 0.4785 - val_accuracy: 0.7849 Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. Epoch 28/300 279/279 [==============================] - 0s 79us/step - loss: 0.3637 - accuracy: 0.8746 - val_loss: 0.4799 - val_accuracy: 0.7849 Epoch 29/300 279/279 [==============================] - 0s 75us/step - loss: 0.3620 - accuracy: 0.8746 - val_loss: 0.4816 - val_accuracy: 0.7849 Epoch 30/300 279/279 [==============================] - 0s 79us/step - loss: 0.3604 - accuracy: 0.8746 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 31/300 279/279 [==============================] - 0s 75us/step - loss: 0.3596 - accuracy: 0.8746 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 32/300 279/279 [==============================] - 0s 79us/step - loss: 0.3572 - accuracy: 0.8781 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 33/300 279/279 [==============================] - 0s 79us/step - loss: 0.3555 - accuracy: 0.8781 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 34/300 279/279 [==============================] - 0s 79us/step - loss: 0.3540 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 35/300 279/279 [==============================] - 0s 82us/step - loss: 0.3527 - accuracy: 0.8817 - val_loss: 0.4817 - val_accuracy: 0.7849 Epoch 36/300 279/279 [==============================] - 0s 75us/step - loss: 0.3510 - accuracy: 0.8817 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 37/300 279/279 [==============================] - 0s 104us/step - loss: 0.3493 - accuracy: 0.8781 - val_loss: 0.4836 - val_accuracy: 0.7849 Epoch 00037: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 38/300 279/279 [==============================] - 0s 86us/step - loss: 0.3476 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 39/300 279/279 [==============================] - 0s 82us/step - loss: 0.3469 - accuracy: 0.8817 - val_loss: 0.4823 - val_accuracy: 0.7849 Epoch 40/300 279/279 [==============================] - 0s 75us/step - loss: 0.3457 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 41/300 279/279 [==============================] - 0s 82us/step - loss: 0.3449 - accuracy: 0.8817 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 42/300 279/279 [==============================] - 0s 90us/step - loss: 0.3443 - accuracy: 0.8817 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 43/300 279/279 [==============================] - 0s 104us/step - loss: 0.3434 - accuracy: 0.8817 - val_loss: 0.4833 - val_accuracy: 0.7849 Epoch 44/300 279/279 [==============================] - 0s 93us/step - loss: 0.3427 - accuracy: 0.8817 - val_loss: 0.4835 - val_accuracy: 0.7849 Epoch 45/300 279/279 [==============================] - 0s 90us/step - loss: 0.3418 - accuracy: 0.8817 - val_loss: 0.4831 - val_accuracy: 0.7849 Epoch 46/300 279/279 [==============================] - 0s 93us/step - loss: 0.3411 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 47/300 279/279 [==============================] - 0s 97us/step - loss: 0.3403 - accuracy: 0.8817 - val_loss: 0.4832 - val_accuracy: 0.7849 Epoch 00047: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 48/300 279/279 [==============================] - 0s 111us/step - loss: 0.3394 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 49/300 279/279 [==============================] - 0s 100us/step - loss: 0.3389 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 50/300 279/279 [==============================] - 0s 111us/step - loss: 0.3386 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 51/300 279/279 [==============================] - 0s 115us/step - loss: 0.3381 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 52/300 279/279 [==============================] - 0s 136us/step - loss: 0.3378 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 53/300 279/279 [==============================] - 0s 118us/step - loss: 0.3373 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 54/300 279/279 [==============================] - 0s 86us/step - loss: 0.3370 - accuracy: 0.8817 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 55/300 279/279 [==============================] - 0s 115us/step - loss: 0.3365 - accuracy: 0.8817 - val_loss: 0.4825 - val_accuracy: 0.7849 Epoch 56/300 279/279 [==============================] - 0s 107us/step - loss: 0.3362 - accuracy: 0.8817 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 57/300 279/279 [==============================] - 0s 122us/step - loss: 0.3358 - accuracy: 0.8853 - val_loss: 0.4830 - val_accuracy: 0.7849 Epoch 00057: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 58/300 279/279 [==============================] - 0s 104us/step - loss: 0.3353 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 59/300 279/279 [==============================] - 0s 107us/step - loss: 0.3351 - accuracy: 0.8853 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 60/300 279/279 [==============================] - 0s 104us/step - loss: 0.3349 - accuracy: 0.8853 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 61/300 279/279 [==============================] - 0s 104us/step - loss: 0.3347 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 62/300 279/279 [==============================] - 0s 100us/step - loss: 0.3345 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 63/300 279/279 [==============================] - 0s 79us/step - loss: 0.3343 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 64/300 279/279 [==============================] - 0s 90us/step - loss: 0.3341 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 65/300 279/279 [==============================] - 0s 93us/step - loss: 0.3339 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 66/300 279/279 [==============================] - 0s 104us/step - loss: 0.3337 - accuracy: 0.8889 - val_loss: 0.4829 - val_accuracy: 0.7849 Epoch 67/300 279/279 [==============================] - 0s 79us/step - loss: 0.3335 - accuracy: 0.8889 - val_loss: 0.4826 - val_accuracy: 0.7849 Epoch 00067: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 68/300 279/279 [==============================] - 0s 100us/step - loss: 0.3333 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 69/300 279/279 [==============================] - 0s 104us/step - loss: 0.3332 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 70/300 279/279 [==============================] - 0s 90us/step - loss: 0.3331 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 71/300 279/279 [==============================] - 0s 100us/step - loss: 0.3330 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 72/300 279/279 [==============================] - 0s 107us/step - loss: 0.3329 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 73/300 279/279 [==============================] - 0s 104us/step - loss: 0.3328 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 74/300 279/279 [==============================] - 0s 118us/step - loss: 0.3327 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 75/300 279/279 [==============================] - 0s 100us/step - loss: 0.3326 - accuracy: 0.8889 - val_loss: 0.4827 - val_accuracy: 0.7849 Epoch 76/300 279/279 [==============================] - 0s 90us/step - loss: 0.3325 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 77/300 279/279 [==============================] - 0s 72us/step - loss: 0.3324 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00077: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 78/300 279/279 [==============================] - 0s 111us/step - loss: 0.3323 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 79/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 80/300 279/279 [==============================] - 0s 97us/step - loss: 0.3322 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 81/300 279/279 [==============================] - 0s 97us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 82/300 279/279 [==============================] - 0s 104us/step - loss: 0.3321 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 83/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 84/300 279/279 [==============================] - 0s 86us/step - loss: 0.3320 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 85/300 279/279 [==============================] - 0s 111us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 86/300 279/279 [==============================] - 0s 93us/step - loss: 0.3319 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 87/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00087: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. 
Epoch 88/300 279/279 [==============================] - 0s 90us/step - loss: 0.3318 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 89/300 279/279 [==============================] - 0s 104us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 90/300 279/279 [==============================] - 0s 100us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 91/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 92/300 279/279 [==============================] - 0s 93us/step - loss: 0.3317 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 93/300 279/279 [==============================] - 0s 93us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 94/300 279/279 [==============================] - 0s 100us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 95/300 279/279 [==============================] - 0s 90us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 96/300 279/279 [==============================] - 0s 97us/step - loss: 0.3316 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 97/300 279/279 [==============================] - 0s 100us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00097: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05. 
Epoch 98/300 279/279 [==============================] - 0s 97us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 99/300 279/279 [==============================] - ETA: 0s - loss: 0.3474 - accuracy: 0.81 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 100/300 279/279 [==============================] - ETA: 0s - loss: 0.2551 - accuracy: 0.93 - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 101/300 279/279 [==============================] - 0s 107us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 102/300 279/279 [==============================] - 0s 93us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 103/300 279/279 [==============================] - 0s 86us/step - loss: 0.3315 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 104/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 105/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 106/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 107/300 279/279 [==============================] - 0s 97us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00107: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06. 
Epoch 108/300 279/279 [==============================] - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 109/300 279/279 [==============================] - ETA: 0s - loss: 0.2823 - accuracy: 0.90 - 0s 100us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 110/300 279/279 [==============================] - 0s 90us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 111/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 112/300 279/279 [==============================] - 0s 107us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 113/300 279/279 [==============================] - 0s 86us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 114/300 279/279 [==============================] - 0s 72us/step - loss: 0.3314 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 115/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 116/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 117/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00117: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06. 
Epoch 118/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 119/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 120/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 121/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 122/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 123/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 124/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 125/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 126/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 127/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00127: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06. 
Epoch 128/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 129/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 130/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 131/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 132/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 133/300 279/279 [==============================] - 0s 125us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 134/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 135/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 136/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 137/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00137: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07. 
Epoch 138/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 139/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 140/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 141/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 142/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 143/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 144/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 145/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 146/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 147/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00147: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07. 
Epoch 148/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 149/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 150/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 151/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 152/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 153/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 154/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 155/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 156/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 157/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00157: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07. 
Epoch 158/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 159/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 160/300 279/279 [==============================] - 0s 133us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 161/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 162/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 163/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 164/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 165/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 166/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 167/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00167: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08. 
Epoch 168/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 169/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 170/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 171/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 172/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 173/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 174/300 279/279 [==============================] - 0s 168us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 175/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 176/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 177/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00177: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08. 
Epoch 178/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 179/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 180/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 181/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 182/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 183/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 184/300 279/279 [==============================] - ETA: 0s - loss: 0.2749 - accuracy: 0.96 - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 185/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 186/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 187/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00187: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08. 
Epoch 188/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 189/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 190/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 191/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 192/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 193/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 194/300 279/279 [==============================] - 0s 72us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 195/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 196/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 197/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00197: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08. 
Epoch 198/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 199/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 200/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 201/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 202/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 203/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 204/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 205/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 206/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 207/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00207: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09. 
Epoch 208/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 209/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 210/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 211/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 212/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 213/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 214/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 215/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 216/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 217/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00217: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09. 
Epoch 218/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 219/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 220/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 221/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 222/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 223/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 224/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 225/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 226/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 227/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00227: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09. 
Epoch 228/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 229/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 230/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 231/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 232/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 233/300 279/279 [==============================] - 0s 122us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 234/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 235/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 236/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 237/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00237: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10. 
Epoch 238/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 239/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 240/300 279/279 [==============================] - 0s 68us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 241/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 242/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 243/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 244/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 245/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 246/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 247/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00247: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10. 
Epoch 248/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 249/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 250/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 251/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 252/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 253/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 254/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 255/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 256/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 257/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00257: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10. 
Epoch 258/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 259/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 260/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 261/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 262/300 279/279 [==============================] - 0s 79us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 263/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 264/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 265/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 266/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 267/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00267: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11. 
Epoch 268/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 269/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 270/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 271/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 272/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 273/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 274/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 275/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 276/300 279/279 [==============================] - 0s 90us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 277/300 279/279 [==============================] - 0s 75us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00277: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11. 
Epoch 278/300 279/279 [==============================] - 0s 82us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 279/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 280/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 281/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 282/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 283/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 284/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 285/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 286/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 287/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00287: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11. 
Epoch 288/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 289/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 290/300 279/279 [==============================] - 0s 97us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 291/300 279/279 [==============================] - 0s 111us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 292/300 279/279 [==============================] - 0s 115us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 293/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 294/300 279/279 [==============================] - 0s 118us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 295/300 279/279 [==============================] - 0s 86us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 296/300 279/279 [==============================] - 0s 104us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 297/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 00297: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11. Epoch 298/300 279/279 [==============================] - 0s 107us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 299/300 279/279 [==============================] - 0s 93us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849 Epoch 300/300 279/279 [==============================] - 0s 100us/step - loss: 0.3313 - accuracy: 0.8889 - val_loss: 0.4828 - val_accuracy: 0.7849
# Pull the learning curves recorded by model.fit() out of the History object.
train_acc = history.history['accuracy']
valid_acc = history.history['val_accuracy']
train_loss = history.history['loss']
valid_loss = history.history['val_loss']
epoch_range = range(len(train_acc))
print(epoch_range)

# Accuracy per epoch: blue dots = training, solid blue line = validation.
plt.plot(epoch_range, train_acc, 'bo', label='Training acc')
plt.plot(epoch_range, valid_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()
plt.show()

# Loss per epoch, same visual encoding.
plt.plot(epoch_range, train_loss, 'bo', label='Training loss')
plt.plot(epoch_range, valid_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
range(0, 300)
# Final scoring on the held-out test split.
scores = model.evaluate(X_test, y_test)
test_loss, test_acc = scores
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
93/93 [==============================] - 0s 54us/step test loss: 0.48277782432494626, test accuracy: 0.7849462628364563
# Predicted positive-class probabilities for the test split.
y_pred = model.predict(X_test)
# ROC AUC is threshold-free, so it is computed on the raw scores.
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.7282608695652174
# Binarize the probabilities at the conventional 0.5 threshold so that
# Cohen's kappa (a label-agreement metric) can be computed.
# Idiom fix: list comprehension instead of list(map(lambda ...)).
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.366754617414248
# Display the feature matrix X (372 rows x 13 MFCC columns per the output below).
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 |
372 rows × 13 columns
# Elbow method: fit k-means for k = 1..14 and record the within-cluster
# sum of squares (inertia) so a reasonable k can be read off the curve.
# Idiom fix: the append-in-a-loop pattern is replaced by a comprehension.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
[4836.0, 4367.616182778379, 3974.0733975762073, 3720.4477504020774, 3549.9823424876267, 3386.093163495573, 3254.3815559758523, 3136.90996141146, 3048.6934734702136, 2957.186911982338, 2855.333306370868, 2802.0220333671496, 2712.5965714921504, 2657.189981994876]
# Elbow plot: inertia versus candidate number of clusters.
fig = plt.figure(figsize=(12, 12))
ks = range(1, 15)
plt.plot(ks, WSSs)
[<matplotlib.lines.Line2D at 0x244598a8630>]
# Number of clusters chosen from the elbow curve above.
K = 3
# Fix: use the K constant instead of repeating the literal 3 in the
# constructor. n_init=10 restarts guard against a poor centroid init.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
# Cluster index assigned to each row of X during fit (values 0-2 for K=3).
kmeans_mfcc.labels_
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# The model was fitted on X itself, so predict(X) reproduces the stored
# labels_ attribute (sklearn: fit + predict on the same data == fit_predict
# == labels_). Reuse the stored labels instead of recomputing distances.
clusters_mfcc = kmeans_mfcc.labels_
clusters_mfcc
array([1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1,
1, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0,
1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,
1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1,
1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 1,
1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0,
1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
0, 1, 1, 2, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1,
1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 0])
# Attach the cluster assignment and the target label to the feature table
# so the two can be cross-tabulated below.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.303210 | 1.253016 | -0.394054 | 0.592598 | 0.106623 | -2.083256 | 0.858313 | 0.801936 | 2.380580 | -1.304258 | 0.556361 | 1.949530 | -1.046692 | 1 | 0 |
| 1 | 0.647559 | -1.553511 | -1.648243 | -1.010792 | -0.857927 | 0.335856 | 0.884468 | 0.250703 | 0.298648 | 0.683922 | 1.599907 | 0.349480 | -0.484103 | 1 | 0 |
| 2 | 2.266625 | -0.333664 | 0.685765 | -2.001143 | -0.820018 | -2.442242 | -1.583451 | -2.793213 | -2.158376 | -2.431307 | -0.855856 | -0.471626 | -1.478884 | 0 | 0 |
| 3 | 0.672266 | -0.397422 | 0.105106 | -1.822060 | -1.335294 | -1.384110 | -0.608579 | -1.639581 | -2.081721 | -1.171877 | -1.102493 | -1.264328 | -1.165166 | 0 | 0 |
| 4 | 0.747622 | 0.110331 | -0.079109 | -1.108698 | -0.391749 | -0.448919 | 0.133859 | -0.843237 | -1.924086 | -0.222835 | 0.221819 | 0.017631 | -0.725177 | 0 | 0 |
| 5 | 2.072441 | -1.565884 | -0.268750 | -1.648648 | -3.149219 | -3.406801 | -4.332461 | -1.709990 | -1.313696 | -1.503431 | -2.036749 | -1.928213 | -2.657430 | 0 | 0 |
| 6 | -0.163654 | 0.470736 | 0.440011 | 0.594090 | -1.227236 | 0.409218 | 0.608496 | -0.948833 | -2.051031 | 0.892327 | 0.371683 | -0.318984 | 0.022251 | 0 | 0 |
| 7 | 0.477732 | -0.217651 | -0.908178 | -1.257961 | -1.360625 | -0.551388 | -0.599896 | 0.099066 | 1.299780 | 2.443060 | 0.417236 | -0.919898 | -0.916391 | 1 | 0 |
| 8 | 0.897786 | -3.040013 | 0.311694 | -0.386220 | -0.321124 | -0.221380 | -0.207002 | -0.127210 | 0.011073 | 0.211925 | -0.400748 | -0.296623 | -0.143419 | 0 | 0 |
| 9 | -0.977087 | 1.088438 | -0.184899 | -0.626934 | 0.577247 | 0.522552 | 1.150101 | 1.023214 | 0.136257 | 0.193237 | -0.496760 | 0.367549 | 0.378679 | 1 | 0 |
| 10 | 0.328615 | -1.292300 | -1.398337 | -0.677268 | -1.070980 | -0.075073 | -0.740061 | -0.424240 | -0.216693 | 0.633892 | -0.070397 | 0.960392 | 0.403827 | 0 | 0 |
| 11 | -0.199470 | 0.110219 | 0.238637 | 0.455154 | -0.116209 | 0.374450 | 0.078145 | 0.424005 | 0.633052 | -0.153498 | -0.647002 | 0.301135 | -0.000406 | 1 | 0 |
| 12 | -0.711256 | 0.124802 | 0.734425 | -0.445078 | -0.503247 | -0.323539 | 0.236246 | -0.572803 | -0.221112 | -0.206486 | -0.180516 | 0.119335 | 0.027470 | 0 | 0 |
| 13 | -0.806898 | -0.126740 | -0.383726 | 0.035489 | -1.164460 | -0.574335 | -0.633858 | -0.009812 | -0.131411 | 0.549197 | -0.257952 | 0.307916 | 0.814674 | 0 | 0 |
| 14 | -0.077242 | -0.331495 | 0.550493 | -0.008575 | -0.215759 | -1.260552 | -0.581296 | 0.369790 | -0.684267 | 0.792489 | -0.457321 | -0.704205 | -0.093986 | 0 | 0 |
| 15 | 0.244538 | 0.777957 | 0.464181 | 0.169574 | -0.433604 | -1.172185 | -1.866928 | 0.759778 | -0.372608 | 0.009766 | 0.964104 | 1.082661 | -0.506505 | 0 | 0 |
| 16 | 0.602329 | -0.035069 | 0.178352 | -0.036690 | 0.180302 | -0.769568 | 0.364535 | 0.996915 | 0.263984 | -0.829872 | -0.133422 | 0.601135 | -1.217336 | 1 | 0 |
| 17 | -0.570258 | -0.759570 | 0.108993 | 0.657477 | 0.342355 | -0.903388 | 0.112467 | -0.669060 | -0.661619 | 0.915675 | 1.620722 | -0.160697 | 0.379275 | 0 | 0 |
| 18 | -0.288268 | -1.202534 | -0.544058 | 0.295908 | 0.568680 | -1.416228 | 0.423676 | 0.041836 | -0.665694 | 0.699155 | -0.070704 | -0.429451 | 1.194384 | 0 | 0 |
| 19 | -0.497305 | -0.552590 | 0.332470 | 0.660607 | 0.293725 | -0.945647 | -1.269354 | 0.464095 | 1.166255 | 2.034233 | 2.037855 | 0.555927 | 0.423683 | 1 | 0 |
| 20 | 1.386141 | -0.516432 | -0.074640 | 0.751101 | -1.151864 | 0.155819 | -1.921431 | -3.381158 | -1.145758 | -1.197084 | 0.654749 | 1.636425 | 0.993236 | 0 | 0 |
| 21 | 0.076772 | 0.072900 | 0.122544 | 0.799017 | -1.121011 | -0.137599 | -1.150187 | -1.669293 | -1.110882 | -0.047217 | -0.034112 | -0.659214 | 1.160642 | 0 | 0 |
| 22 | 0.670757 | -0.167252 | -0.352765 | 0.189499 | -1.232602 | -0.168579 | -1.559900 | -1.850665 | -1.416478 | 0.031846 | 0.308193 | -0.956133 | 0.507231 | 0 | 0 |
| 23 | 0.639283 | 0.699164 | 0.621380 | -0.725771 | -0.890352 | 0.643955 | -1.097228 | 0.229756 | -0.091793 | -2.390193 | -0.825768 | -2.164531 | -0.772983 | 1 | 0 |
| 24 | -0.907399 | 2.155157 | 0.873522 | 1.655111 | 0.871099 | 1.083262 | -0.186962 | 0.373227 | -0.354082 | 0.573586 | 0.733097 | -0.986481 | 0.727511 | 1 | 0 |
| 25 | -1.152272 | 1.601470 | 0.221927 | 1.296592 | 0.572807 | 0.581774 | -0.479257 | 0.209504 | -0.636178 | 0.574450 | 0.615706 | -0.877894 | 0.941827 | 1 | 0 |
| 26 | -0.676596 | 0.405600 | 0.553370 | 0.691531 | -0.292469 | 0.626694 | 0.080413 | 0.246868 | -0.100975 | 0.606694 | -0.024154 | -1.553730 | -0.210884 | 1 | 0 |
| 27 | -0.704834 | -0.058170 | 0.609171 | -0.735340 | -0.512747 | 0.796018 | -0.405976 | 0.502120 | 0.717380 | -1.625431 | 0.825742 | -1.663942 | -0.379395 | 1 | 0 |
| 28 | 0.273095 | 1.014503 | -0.772750 | -0.065028 | -0.513485 | 0.235377 | -0.266144 | 1.373964 | 0.711880 | -1.261758 | 1.106463 | 0.515863 | 0.555866 | 1 | 0 |
| 29 | 1.216372 | 0.637021 | 0.649194 | 0.099873 | -0.816614 | 0.555439 | -1.272918 | -0.035862 | 0.154194 | -1.797465 | -0.177830 | -1.702118 | -1.136716 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 342 | 0.056741 | 0.169776 | 0.434163 | -0.208821 | 0.498957 | 0.349198 | 0.547068 | 0.250228 | -0.743894 | -0.497075 | 0.373497 | -0.025547 | 0.260645 | 1 | 1 |
| 343 | -0.845812 | -0.163165 | 0.268174 | 1.312135 | 1.241686 | 1.484484 | 1.279782 | -0.350179 | -0.266719 | -0.170434 | 0.147000 | -0.259175 | -0.323251 | 1 | 1 |
| 344 | 0.056854 | 0.089458 | -0.128149 | 0.123107 | -0.879175 | 0.172486 | 0.919301 | 0.727007 | -0.032509 | -0.594358 | -0.241536 | -0.339538 | -1.563800 | 1 | 1 |
| 345 | 1.322735 | -0.970372 | -1.058427 | 1.018282 | -1.228871 | 0.835533 | 1.462831 | -1.481872 | -2.024441 | 0.388890 | 2.395768 | -0.993539 | 0.301816 | 0 | 1 |
| 346 | -0.159679 | -0.200313 | -0.181878 | 0.221536 | -0.604018 | 0.554979 | 0.173592 | -1.137738 | -1.525377 | -0.382164 | 1.156959 | 0.545188 | -0.873936 | 0 | 1 |
| 347 | -0.510690 | -0.141874 | -0.170690 | -0.486309 | -1.066447 | -1.098392 | -1.513393 | -0.202811 | 0.062343 | 0.446348 | -0.029988 | -0.024432 | -0.978036 | 0 | 1 |
| 348 | 0.441393 | 0.403987 | 0.538948 | 1.253198 | -0.158511 | 0.497768 | 0.151471 | -0.006025 | 0.213458 | 0.119760 | -0.002312 | 0.139434 | -0.401118 | 1 | 1 |
| 349 | 0.548477 | 0.987769 | 0.505748 | 0.779668 | 0.504327 | -0.003400 | 0.200264 | 0.287803 | 0.084852 | -0.044437 | 0.769553 | 0.169816 | -0.581506 | 1 | 1 |
| 350 | 0.278851 | -0.150632 | 1.015313 | 0.158731 | -1.435466 | -0.910636 | 1.526971 | 0.810376 | -0.088268 | 2.273901 | 1.895682 | -0.573207 | 1.173543 | 1 | 1 |
| 351 | 1.781784 | -0.680962 | -0.140043 | 1.730156 | 0.760657 | 1.081874 | 0.686370 | -0.456141 | -0.310319 | 0.443108 | 0.067726 | -0.804283 | 0.268616 | 1 | 1 |
| 352 | 1.110023 | -0.419764 | -0.451242 | 1.471440 | 0.860531 | 0.858025 | 1.016472 | 0.013533 | -0.532955 | 0.597255 | -0.385255 | -1.299309 | 0.869963 | 1 | 1 |
| 353 | 0.463780 | 0.094111 | 0.074193 | 0.457058 | -0.494585 | -0.741218 | -1.615368 | -0.323890 | 0.179301 | -0.914854 | -0.881275 | -0.284568 | 0.516848 | 0 | 1 |
| 354 | 0.162857 | 1.300630 | -0.374191 | -0.148478 | -0.275205 | 0.936621 | -0.301931 | 0.926288 | -0.242039 | -1.217862 | -0.849053 | 0.381655 | 1.521222 | 1 | 1 |
| 355 | -0.261040 | 1.897992 | 0.324175 | 0.250461 | -0.326921 | 0.078347 | -0.794723 | 1.245895 | 0.561437 | 0.299601 | 0.612062 | 0.375109 | 0.668225 | 1 | 1 |
| 356 | -2.412627 | -0.912657 | 0.924859 | 1.091412 | -0.430459 | 0.991776 | 0.577087 | 0.366311 | 0.916132 | -0.010096 | -0.337066 | 0.723121 | 0.634413 | 1 | 1 |
| 357 | -1.610420 | -0.171488 | 1.308910 | 1.557149 | -0.783120 | 1.055891 | 0.070922 | 0.736289 | 0.651236 | -0.209692 | -0.293388 | 0.549580 | 0.947465 | 1 | 1 |
| 358 | -1.627642 | -0.225022 | 1.420291 | 1.585386 | -0.623077 | 1.204209 | 0.203574 | 0.815228 | 0.701131 | -0.111706 | -0.352897 | 0.552444 | 1.038487 | 1 | 1 |
| 359 | 1.658650 | 0.261694 | 0.694273 | -0.634006 | -0.742717 | -1.107684 | -0.040641 | 0.685375 | 0.704374 | 0.457634 | -0.012812 | -0.227444 | -0.311482 | 1 | 1 |
| 360 | -0.472450 | 1.290735 | 1.251486 | 0.902820 | 1.064267 | 0.319911 | 0.273062 | -0.004026 | -0.730129 | -0.487802 | -0.590033 | 0.917054 | 0.316796 | 1 | 1 |
| 361 | 0.145973 | 1.078298 | -0.110458 | 0.396705 | 0.465683 | 0.120005 | 0.324478 | 0.647014 | 0.406366 | 0.303529 | 0.342183 | 0.418467 | -0.257006 | 1 | 1 |
| 362 | 1.354053 | 0.408020 | -1.449365 | -0.144038 | 0.735070 | 1.458916 | -0.253049 | 0.476118 | 1.309448 | 1.981607 | 0.319930 | -0.734588 | -2.427842 | 1 | 1 |
| 363 | 3.546326 | -0.337767 | -0.983896 | -3.155084 | 1.922015 | 3.128359 | 1.576092 | 2.767242 | 2.734920 | 1.749030 | -1.432287 | -5.486282 | -3.776088 | 2 | 1 |
| 364 | 3.564797 | -0.492960 | -0.663172 | -2.465245 | 2.044991 | 3.045697 | 1.746383 | 2.238430 | 2.806354 | 2.318786 | -0.732814 | -5.203217 | -4.762769 | 2 | 1 |
| 365 | -0.480041 | 0.390140 | 0.283493 | 0.710367 | 0.436247 | 0.787936 | 0.149057 | 1.081200 | 1.130496 | 0.783116 | 1.174331 | 0.987069 | 1.027523 | 1 | 1 |
| 366 | 1.601344 | 1.120977 | 0.942690 | 0.218542 | 1.432015 | 1.975393 | 1.352637 | 0.851851 | 0.707687 | 1.420656 | -1.301018 | 0.996552 | 2.286308 | 1 | 1 |
| 367 | -1.388425 | 0.554214 | 2.322455 | 0.125526 | 0.168411 | 1.459935 | -0.011567 | 0.377516 | 1.801634 | 1.061665 | 1.432895 | 1.553952 | 0.335629 | 1 | 1 |
| 368 | -0.560849 | 0.191976 | -1.558597 | -1.925355 | 0.006144 | 1.582531 | 2.334107 | 1.472221 | 0.788027 | -0.066399 | -0.539592 | -0.089987 | 1.081026 | 1 | 1 |
| 369 | -0.015724 | 0.095384 | -0.050287 | 0.330265 | -0.857518 | -1.110181 | 0.457976 | 1.235890 | 0.937447 | 1.294180 | 1.196429 | 1.964913 | 0.788473 | 1 | 1 |
| 370 | -0.320091 | 0.789370 | -0.347116 | -0.257819 | -0.264866 | 0.119392 | 0.174557 | -0.102622 | -0.147261 | 0.927949 | 1.494185 | 0.617596 | -0.119013 | 1 | 1 |
| 371 | -1.397911 | 0.969347 | -0.218602 | 0.165675 | -1.187201 | -1.022111 | 0.090487 | -0.281353 | -1.520146 | -0.950575 | -1.733689 | -0.924046 | -0.731033 | 0 | 1 |
372 rows × 15 columns
# Count the rows for every (chosen, Cluster) combination; reset_index turns
# the grouped sizes into a flat frame whose count column is named 0.
stacked = X.groupby(['chosen','Cluster']).size().reset_index()
# Reshape: one row per cluster, one column per value of 'chosen'.
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
# Stacked bars showing each cluster's composition by the chosen flag.
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
<matplotlib.axes._subplots.AxesSubplot at 0x2445d307358>
# Render the company name as a Markdown heading inside the notebook.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[1]))
# Rebuild the design matrix for company index 1, dropping the 'Cluster'
# column so only the 13 MFCC features remain (shape check below: (191, 13)).
X = df_n_ps_std_mfcc[1].drop(columns='Cluster')
y = df_n_ps[1]['chosen']
# Default split ratio (sklearn's 75% train / 25% test).
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(191, 13)
# Base estimator. The architecture given here is only a starting point:
# hidden_layer_sizes is one of the grid-searched parameters below.
mlp = MLPClassifier(hidden_layer_sizes=(30, 30, 30))

# Candidate values for the hyper-parameter search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005,
                          0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time

start = time.time()  # wall-clock reference (seconds since the Unix epoch)
np.random.seed(1234)

# Hyper-parameter grid; batch_size is deliberately left out of the search.
parametros = {
    'activation': activation_vec,
    'max_iter': max_iter_vec,
    'hidden_layer_sizes': hidden_layer_sizes_vec,
    'learning_rate_init': learning_rate_init_vec,
    # 'batch_size': batch_size_vec,
}
# Track kappa alongside accuracy; the best model is refit on accuracy.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
# NOTE(review): iid= was deprecated and later removed in newer
# scikit-learn releases — drop it when upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring,
                    refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))

end = time.time()  # wall-clock time once the search has finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20, 20), 'learning_rate_init': 0.01, 'max_iter': 2000}, que permiten obtener un Accuracy de 81.68% y un Kappa del 52.97
Tiempo total: 25.07 minutos
n0 = X_train.shape[1]  # input dimension (13 features, per X_train.shape above)

# Layer widths: the tuned hidden sizes followed by a single output unit.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']

# BUG FIX: the hidden activation was hard-coded to 'tanh', silently
# ignoring the activation selected by the grid search (reported best:
# 'relu'). Map the MLPClassifier activation name to its Keras equivalent
# and use it for every hidden layer.
_activation_map = {'logistic': 'sigmoid', 'relu': 'relu', 'tanh': 'tanh'}
hidden_activation = _activation_map[grid.best_params_['activation']]

# Functional-API graph: input -> tuned hidden stack -> sigmoid output.
input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation=hidden_activation)(hidden_outputs[i]))
# Single sigmoid unit for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot of the freshly initialized weights
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_4 (Dense) (None, 20) 280 _________________________________________________________________ dense_5 (Dense) (None, 20) 420 _________________________________________________________________ dense_6 (Dense) (None, 20) 420 _________________________________________________________________ dense_7 (Dense) (None, 1) 21 ================================================================= Total params: 1,141 Trainable params: 1,141 Non-trainable params: 0 _________________________________________________________________
# Reset to the snapshot taken right after the model was built, so training
# starts from the same initialization every time this cell is re-run.
model.set_weights(weights)

adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])

# Halve the learning rate whenever validation accuracy fails to improve by
# at least 0.01 for 10 consecutive epochs.
lr_schedule = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1)

history = model.fit(
    X_train, y_train,
    epochs=epochs,
    validation_data=(X_test, y_test),
    batch_size=32,
    callbacks=[lr_schedule],
)
Train on 191 samples, validate on 64 samples Epoch 1/2000 191/191 [==============================] - 2s 10ms/step - loss: 0.6523 - accuracy: 0.5864 - val_loss: 0.5471 - val_accuracy: 0.7031 Epoch 2/2000 191/191 [==============================] - 0s 89us/step - loss: 0.4800 - accuracy: 0.7696 - val_loss: 0.5294 - val_accuracy: 0.7500 Epoch 3/2000 191/191 [==============================] - 0s 68us/step - loss: 0.4421 - accuracy: 0.8010 - val_loss: 0.5305 - val_accuracy: 0.7812 Epoch 4/2000 191/191 [==============================] - 0s 63us/step - loss: 0.4243 - accuracy: 0.8168 - val_loss: 0.5390 - val_accuracy: 0.7500 Epoch 5/2000 191/191 [==============================] - 0s 73us/step - loss: 0.4080 - accuracy: 0.8168 - val_loss: 0.5368 - val_accuracy: 0.7500 Epoch 6/2000 191/191 [==============================] - 0s 58us/step - loss: 0.3924 - accuracy: 0.8168 - val_loss: 0.5567 - val_accuracy: 0.7188 Epoch 7/2000 191/191 [==============================] - 0s 89us/step - loss: 0.3782 - accuracy: 0.8168 - val_loss: 0.5325 - val_accuracy: 0.7500 Epoch 8/2000 191/191 [==============================] - 0s 63us/step - loss: 0.3562 - accuracy: 0.8429 - val_loss: 0.5605 - val_accuracy: 0.7500 Epoch 9/2000 191/191 [==============================] - 0s 63us/step - loss: 0.3462 - accuracy: 0.8586 - val_loss: 0.5968 - val_accuracy: 0.7188 Epoch 10/2000 191/191 [==============================] - 0s 89us/step - loss: 0.3171 - accuracy: 0.8639 - val_loss: 0.5665 - val_accuracy: 0.7500 Epoch 11/2000 191/191 [==============================] - 0s 68us/step - loss: 0.3107 - accuracy: 0.8639 - val_loss: 0.5456 - val_accuracy: 0.7656 Epoch 12/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2855 - accuracy: 0.8743 - val_loss: 0.6107 - val_accuracy: 0.7500 Epoch 13/2000 191/191 [==============================] - 0s 58us/step - loss: 0.2626 - accuracy: 0.8848 - val_loss: 0.6076 - val_accuracy: 0.7656 Epoch 00013: ReduceLROnPlateau reducing learning rate to 
0.004999999888241291. Epoch 14/2000 191/191 [==============================] - 0s 94us/step - loss: 0.2396 - accuracy: 0.9215 - val_loss: 0.5932 - val_accuracy: 0.7656 Epoch 15/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2269 - accuracy: 0.9162 - val_loss: 0.5980 - val_accuracy: 0.7812 Epoch 16/2000 191/191 [==============================] - 0s 63us/step - loss: 0.2133 - accuracy: 0.9372 - val_loss: 0.6289 - val_accuracy: 0.7656 Epoch 17/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2023 - accuracy: 0.9372 - val_loss: 0.6384 - val_accuracy: 0.7656 Epoch 18/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1904 - accuracy: 0.9424 - val_loss: 0.6197 - val_accuracy: 0.7656 Epoch 19/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1817 - accuracy: 0.9372 - val_loss: 0.6255 - val_accuracy: 0.7656 Epoch 20/2000 191/191 [==============================] - 0s 68us/step - loss: 0.1725 - accuracy: 0.9424 - val_loss: 0.6415 - val_accuracy: 0.7500 Epoch 21/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1603 - accuracy: 0.9424 - val_loss: 0.6400 - val_accuracy: 0.7656 Epoch 22/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1473 - accuracy: 0.9581 - val_loss: 0.6420 - val_accuracy: 0.7500 Epoch 23/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1367 - accuracy: 0.9738 - val_loss: 0.6448 - val_accuracy: 0.7656 Epoch 00023: ReduceLROnPlateau reducing learning rate to 0.0024999999441206455. 
Epoch 24/2000 191/191 [==============================] - 0s 68us/step - loss: 0.1248 - accuracy: 0.9791 - val_loss: 0.6578 - val_accuracy: 0.7812 Epoch 25/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1211 - accuracy: 0.9738 - val_loss: 0.6633 - val_accuracy: 0.7812 Epoch 26/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1166 - accuracy: 0.9791 - val_loss: 0.6651 - val_accuracy: 0.7656 Epoch 27/2000 191/191 [==============================] - 0s 110us/step - loss: 0.1121 - accuracy: 0.9791 - val_loss: 0.6655 - val_accuracy: 0.7500 Epoch 28/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1069 - accuracy: 0.9791 - val_loss: 0.6765 - val_accuracy: 0.7656 Epoch 29/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1026 - accuracy: 0.9791 - val_loss: 0.6865 - val_accuracy: 0.7656 Epoch 30/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0981 - accuracy: 0.9843 - val_loss: 0.6846 - val_accuracy: 0.7656 Epoch 31/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0922 - accuracy: 0.9843 - val_loss: 0.6932 - val_accuracy: 0.7656 Epoch 32/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0883 - accuracy: 0.9843 - val_loss: 0.7066 - val_accuracy: 0.7656 Epoch 33/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0829 - accuracy: 0.9895 - val_loss: 0.7111 - val_accuracy: 0.7656 Epoch 00033: ReduceLROnPlateau reducing learning rate to 0.0012499999720603228. 
Epoch 34/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0790 - accuracy: 0.9895 - val_loss: 0.7178 - val_accuracy: 0.7656 Epoch 35/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0767 - accuracy: 0.9895 - val_loss: 0.7203 - val_accuracy: 0.7656 Epoch 36/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0752 - accuracy: 0.9895 - val_loss: 0.7267 - val_accuracy: 0.7656 Epoch 37/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0729 - accuracy: 0.9895 - val_loss: 0.7269 - val_accuracy: 0.7656 Epoch 38/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0705 - accuracy: 0.9895 - val_loss: 0.7364 - val_accuracy: 0.7656 Epoch 39/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0686 - accuracy: 0.9895 - val_loss: 0.7434 - val_accuracy: 0.7656 Epoch 40/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0668 - accuracy: 0.9895 - val_loss: 0.7461 - val_accuracy: 0.7812 Epoch 41/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0648 - accuracy: 0.9895 - val_loss: 0.7476 - val_accuracy: 0.7812 Epoch 42/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0631 - accuracy: 0.9895 - val_loss: 0.7577 - val_accuracy: 0.7812 Epoch 43/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0610 - accuracy: 0.9895 - val_loss: 0.7680 - val_accuracy: 0.7812 Epoch 00043: ReduceLROnPlateau reducing learning rate to 0.0006249999860301614. 
Epoch 44/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0592 - accuracy: 0.9895 - val_loss: 0.7672 - val_accuracy: 0.7812 Epoch 45/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0582 - accuracy: 0.9895 - val_loss: 0.7674 - val_accuracy: 0.7812 Epoch 46/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0576 - accuracy: 0.9895 - val_loss: 0.7688 - val_accuracy: 0.7812 Epoch 47/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0566 - accuracy: 0.9895 - val_loss: 0.7700 - val_accuracy: 0.7812 Epoch 48/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0557 - accuracy: 0.9895 - val_loss: 0.7748 - val_accuracy: 0.7812 Epoch 49/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0547 - accuracy: 0.9895 - val_loss: 0.7777 - val_accuracy: 0.7812 Epoch 50/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0540 - accuracy: 0.9948 - val_loss: 0.7821 - val_accuracy: 0.7812 Epoch 51/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0533 - accuracy: 0.9948 - val_loss: 0.7849 - val_accuracy: 0.7812 Epoch 52/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0525 - accuracy: 0.9948 - val_loss: 0.7842 - val_accuracy: 0.7812 Epoch 53/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0515 - accuracy: 0.9948 - val_loss: 0.7869 - val_accuracy: 0.7812 Epoch 00053: ReduceLROnPlateau reducing learning rate to 0.0003124999930150807. 
Epoch 54/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0509 - accuracy: 0.9948 - val_loss: 0.7898 - val_accuracy: 0.7812 Epoch 55/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0504 - accuracy: 0.9948 - val_loss: 0.7899 - val_accuracy: 0.7812 Epoch 56/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0500 - accuracy: 0.9948 - val_loss: 0.7926 - val_accuracy: 0.7812 Epoch 57/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0496 - accuracy: 0.9948 - val_loss: 0.7942 - val_accuracy: 0.7812 Epoch 58/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0492 - accuracy: 0.9948 - val_loss: 0.7951 - val_accuracy: 0.7812 Epoch 59/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0487 - accuracy: 0.9948 - val_loss: 0.7963 - val_accuracy: 0.7812 Epoch 60/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0484 - accuracy: 0.9948 - val_loss: 0.7975 - val_accuracy: 0.7812 Epoch 61/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0480 - accuracy: 0.9948 - val_loss: 0.7996 - val_accuracy: 0.7812 Epoch 62/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0476 - accuracy: 0.9948 - val_loss: 0.8015 - val_accuracy: 0.7812 Epoch 63/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0472 - accuracy: 0.9948 - val_loss: 0.8032 - val_accuracy: 0.7656 Epoch 00063: ReduceLROnPlateau reducing learning rate to 0.00015624999650754035. 
Epoch 64/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0468 - accuracy: 0.9948 - val_loss: 0.8047 - val_accuracy: 0.7656 Epoch 65/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0466 - accuracy: 0.9948 - val_loss: 0.8048 - val_accuracy: 0.7656 Epoch 66/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0464 - accuracy: 0.9948 - val_loss: 0.8054 - val_accuracy: 0.7656 Epoch 67/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0462 - accuracy: 0.9948 - val_loss: 0.8063 - val_accuracy: 0.7656 Epoch 68/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0461 - accuracy: 0.9948 - val_loss: 0.8067 - val_accuracy: 0.7656 Epoch 69/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0459 - accuracy: 0.9948 - val_loss: 0.8076 - val_accuracy: 0.7656 Epoch 70/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0456 - accuracy: 0.9948 - val_loss: 0.8089 - val_accuracy: 0.7656 Epoch 71/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0455 - accuracy: 0.9948 - val_loss: 0.8099 - val_accuracy: 0.7656 Epoch 72/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0452 - accuracy: 0.9948 - val_loss: 0.8107 - val_accuracy: 0.7656 Epoch 73/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0451 - accuracy: 0.9948 - val_loss: 0.8106 - val_accuracy: 0.7656 Epoch 00073: ReduceLROnPlateau reducing learning rate to 7.812499825377017e-05. 
Epoch 74/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0449 - accuracy: 0.9948 - val_loss: 0.8107 - val_accuracy: 0.7656 Epoch 75/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0448 - accuracy: 0.9948 - val_loss: 0.8113 - val_accuracy: 0.7656 Epoch 76/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0447 - accuracy: 0.9948 - val_loss: 0.8118 - val_accuracy: 0.7656 Epoch 77/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0446 - accuracy: 0.9948 - val_loss: 0.8122 - val_accuracy: 0.7656 Epoch 78/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0445 - accuracy: 0.9948 - val_loss: 0.8128 - val_accuracy: 0.7656 Epoch 79/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0444 - accuracy: 0.9948 - val_loss: 0.8131 - val_accuracy: 0.7656 Epoch 80/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0443 - accuracy: 0.9948 - val_loss: 0.8137 - val_accuracy: 0.7656 Epoch 81/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0442 - accuracy: 0.9948 - val_loss: 0.8142 - val_accuracy: 0.7656 Epoch 82/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0441 - accuracy: 0.9948 - val_loss: 0.8146 - val_accuracy: 0.7656 Epoch 83/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0440 - accuracy: 0.9948 - val_loss: 0.8150 - val_accuracy: 0.7656 Epoch 00083: ReduceLROnPlateau reducing learning rate to 3.9062499126885086e-05. 
Epoch 84/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0439 - accuracy: 0.9948 - val_loss: 0.8152 - val_accuracy: 0.7656 Epoch 85/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0439 - accuracy: 0.9948 - val_loss: 0.8155 - val_accuracy: 0.7656 Epoch 86/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0438 - accuracy: 0.9948 - val_loss: 0.8155 - val_accuracy: 0.7656 Epoch 87/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0438 - accuracy: 0.9948 - val_loss: 0.8159 - val_accuracy: 0.7656 Epoch 88/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0437 - accuracy: 0.9948 - val_loss: 0.8161 - val_accuracy: 0.7656 Epoch 89/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0437 - accuracy: 0.9948 - val_loss: 0.8162 - val_accuracy: 0.7656 Epoch 90/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0436 - accuracy: 0.9948 - val_loss: 0.8164 - val_accuracy: 0.7656 Epoch 91/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0436 - accuracy: 0.9948 - val_loss: 0.8166 - val_accuracy: 0.7656 Epoch 92/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0435 - accuracy: 0.9948 - val_loss: 0.8170 - val_accuracy: 0.7656 Epoch 93/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0435 - accuracy: 0.9948 - val_loss: 0.8171 - val_accuracy: 0.7656 Epoch 00093: ReduceLROnPlateau reducing learning rate to 1.9531249563442543e-05. 
Epoch 94/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8172 - val_accuracy: 0.7656 Epoch 95/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8174 - val_accuracy: 0.7656 Epoch 96/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8175 - val_accuracy: 0.7656 Epoch 97/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8174 - val_accuracy: 0.7656 Epoch 98/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8176 - val_accuracy: 0.7656 Epoch 99/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8177 - val_accuracy: 0.7656 Epoch 100/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8179 - val_accuracy: 0.7656 Epoch 101/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8180 - val_accuracy: 0.7656 Epoch 102/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8180 - val_accuracy: 0.7656 Epoch 103/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8182 - val_accuracy: 0.7656 Epoch 00103: ReduceLROnPlateau reducing learning rate to 9.765624781721272e-06. 
Epoch 104/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8182 - val_accuracy: 0.7656 Epoch 105/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8183 - val_accuracy: 0.7656 Epoch 106/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8183 - val_accuracy: 0.7656 Epoch 107/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8184 - val_accuracy: 0.7656 Epoch 108/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8185 - val_accuracy: 0.7656 Epoch 109/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8185 - val_accuracy: 0.7656 Epoch 110/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8186 - val_accuracy: 0.7656 Epoch 111/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8186 - val_accuracy: 0.7656 Epoch 112/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8187 - val_accuracy: 0.7656 Epoch 113/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8187 - val_accuracy: 0.7656 Epoch 00113: ReduceLROnPlateau reducing learning rate to 4.882812390860636e-06. 
Epoch 114/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 115/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 116/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 117/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 118/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 119/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 120/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 121/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 122/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 123/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 00123: ReduceLROnPlateau reducing learning rate to 2.441406195430318e-06. 
Epoch 124/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 125/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 126/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 127/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 128/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 129/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 130/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 131/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 132/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 133/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 00133: ReduceLROnPlateau reducing learning rate to 1.220703097715159e-06. 
Epoch 134/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 135/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 136/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 137/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 138/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 139/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 140/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 141/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 142/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 143/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 00143: ReduceLROnPlateau reducing learning rate to 6.103515488575795e-07. 
Epoch 144/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 145/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 146/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 147/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 148/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 149/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 150/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 151/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 152/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 153/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00153: ReduceLROnPlateau reducing learning rate to 3.0517577442878974e-07. 
Epoch 154/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 155/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 156/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 157/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 158/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 159/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 160/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 161/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 162/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 163/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00163: ReduceLROnPlateau reducing learning rate to 1.5258788721439487e-07. 
Epoch 164/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 165/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 166/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 167/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 168/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 169/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 170/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 171/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 172/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 173/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00173: ReduceLROnPlateau reducing learning rate to 7.629394360719743e-08. 
Epoch 174/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 175/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 176/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 177/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 178/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 179/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 180/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 181/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 182/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 183/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00183: ReduceLROnPlateau reducing learning rate to 3.814697180359872e-08. 
Epoch 184/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 185/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 186/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 187/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 188/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 189/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 190/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 191/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 192/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 193/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00193: ReduceLROnPlateau reducing learning rate to 1.907348590179936e-08. 
Epoch 194/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 195/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 196/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 197/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 198/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 199/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 200/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 201/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 202/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 203/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00203: ReduceLROnPlateau reducing learning rate to 9.53674295089968e-09. 
Epoch 204/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 205/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 206/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 207/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 208/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 209/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 210/2000 191/191 [==============================] - ETA: 0s - loss: 0.0166 - accuracy: 1.00 - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 211/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 212/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 213/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00213: ReduceLROnPlateau reducing learning rate to 4.76837147544984e-09. 
Epoch 214/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 215/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 216/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 217/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 218/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 219/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 220/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 221/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 222/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 223/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00223: ReduceLROnPlateau reducing learning rate to 2.38418573772492e-09. 
Epoch 224/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 225/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 226/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 227/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 228/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 229/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 230/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 231/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 232/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 233/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00233: ReduceLROnPlateau reducing learning rate to 1.19209286886246e-09. 
Epoch 234/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 235/2000 191/191 [==============================] - ETA: 0s - loss: 0.0224 - accuracy: 1.00 - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 236/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 237/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 238/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 239/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 240/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 241/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 242/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 243/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00243: ReduceLROnPlateau reducing learning rate to 5.9604643443123e-10. 
Epoch 244/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 245/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 246/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 247/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 248/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 249/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 250/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 251/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 252/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 253/2000 191/191 [==============================] - ETA: 0s - loss: 0.0256 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00253: ReduceLROnPlateau reducing learning rate to 2.98023217215615e-10. 
Epoch 254/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 255/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 256/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 257/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 258/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 259/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 260/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 261/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 262/2000 191/191 [==============================] - ETA: 0s - loss: 0.0223 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 263/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00263: ReduceLROnPlateau reducing learning rate to 1.490116086078075e-10. 
Epoch 264/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 265/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 266/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 267/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 268/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 269/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 270/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 271/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 272/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 273/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00273: ReduceLROnPlateau reducing learning rate to 7.450580430390374e-11. 
Epoch 274/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 275/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 276/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 277/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 278/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 279/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 280/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 281/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 282/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 283/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00283: ReduceLROnPlateau reducing learning rate to 3.725290215195187e-11. 
Epoch 284/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 285/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 286/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 287/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 288/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 289/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 290/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 291/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 292/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 293/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00293: ReduceLROnPlateau reducing learning rate to 1.8626451075975936e-11. 
Epoch 294/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 295/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 296/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 297/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 298/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 299/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 300/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 301/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 302/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 303/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00303: ReduceLROnPlateau reducing learning rate to 9.313225537987968e-12. 
Epoch 304/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 305/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 306/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 307/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 308/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 309/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 310/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 311/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 312/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 313/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00313: ReduceLROnPlateau reducing learning rate to 4.656612768993984e-12. 
Epoch 314/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 315/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 316/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 317/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 318/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 319/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 320/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 321/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 322/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 323/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00323: ReduceLROnPlateau reducing learning rate to 2.328306384496992e-12. 
Epoch 324/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 325/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 326/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 327/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 328/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 329/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 330/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 331/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 332/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 333/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00333: ReduceLROnPlateau reducing learning rate to 1.164153192248496e-12. 
Epoch 334/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 335/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 336/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 337/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 338/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 339/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 340/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 341/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 342/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 343/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00343: ReduceLROnPlateau reducing learning rate to 5.82076596124248e-13. 
Epoch 344/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 345/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 346/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 347/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 348/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 349/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 350/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 351/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 352/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 353/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00353: ReduceLROnPlateau reducing learning rate to 2.91038298062124e-13. 
Epoch 354/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 355/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 356/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 357/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 358/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 359/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 360/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 361/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 362/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 363/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00363: ReduceLROnPlateau reducing learning rate to 1.45519149031062e-13. 
Epoch 364/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 365/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 366/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 367/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 368/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 369/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 370/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 371/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 372/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 373/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00373: ReduceLROnPlateau reducing learning rate to 7.2759574515531e-14. 
Epoch 374/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 375/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 376/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 377/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 378/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 379/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 380/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 381/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 382/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 383/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00383: ReduceLROnPlateau reducing learning rate to 3.63797872577655e-14. 
Epoch 384/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 385/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 386/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 387/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 388/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 389/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 390/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 391/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 392/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 393/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00393: ReduceLROnPlateau reducing learning rate to 1.818989362888275e-14. 
Epoch 394/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 395/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 396/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 397/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 398/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 399/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 400/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 401/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 402/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 403/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00403: ReduceLROnPlateau reducing learning rate to 9.094946814441375e-15. 
Epoch 404/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 405/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 406/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 407/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 408/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 409/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 410/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 411/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 412/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 413/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00413: ReduceLROnPlateau reducing learning rate to 4.5474734072206875e-15. 
Epoch 414/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 415/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 416/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 417/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 418/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 419/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 420/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 421/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 422/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 423/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00423: ReduceLROnPlateau reducing learning rate to 2.2737367036103438e-15. 
Epoch 424/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 425/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 426/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 427/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 428/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 429/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 430/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 431/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 432/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 433/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00433: ReduceLROnPlateau reducing learning rate to 1.1368683518051719e-15. 
Epoch 434/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 435/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 436/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 437/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 438/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 439/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 440/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 441/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 442/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 443/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00443: ReduceLROnPlateau reducing learning rate to 5.684341759025859e-16. 
Epoch 444/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 445/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 446/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 447/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 448/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 449/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 450/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 451/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 452/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 453/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00453: ReduceLROnPlateau reducing learning rate to 2.8421708795129297e-16. 
Epoch 454/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 455/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 456/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 457/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 458/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 459/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 460/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 461/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 462/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 463/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00463: ReduceLROnPlateau reducing learning rate to 1.4210854397564648e-16. 
Epoch 464/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 465/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 466/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 467/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 468/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 469/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 470/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 471/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 472/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 473/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00473: ReduceLROnPlateau reducing learning rate to 7.105427198782324e-17. 
Epoch 474/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 475/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 476/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 477/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 478/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 479/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 480/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 481/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 482/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 483/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00483: ReduceLROnPlateau reducing learning rate to 3.552713599391162e-17. 
Epoch 484/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 485/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 486/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 487/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 488/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 489/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 490/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 491/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 492/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 493/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00493: ReduceLROnPlateau reducing learning rate to 1.776356799695581e-17. 
Epoch 494/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 495/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 496/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 497/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 498/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 499/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 500/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 501/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 502/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 503/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00503: ReduceLROnPlateau reducing learning rate to 8.881783998477905e-18. 
Epoch 504/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 505/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 506/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 507/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 508/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 509/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 510/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 511/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 512/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 513/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00513: ReduceLROnPlateau reducing learning rate to 4.440891999238953e-18. 
Epoch 514/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 515/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 516/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 517/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 518/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 519/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 520/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 521/2000 191/191 [==============================] - 0s 141us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 522/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 523/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00523: ReduceLROnPlateau reducing learning rate to 2.2204459996194763e-18. 
Epoch 524/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 525/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 526/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 527/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 528/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 529/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 530/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 531/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 532/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 533/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00533: ReduceLROnPlateau reducing learning rate to 1.1102229998097382e-18. 
Epoch 534/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 535/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 536/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 537/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 538/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 539/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 540/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 541/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 542/2000 191/191 [==============================] - ETA: 0s - loss: 0.0396 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 543/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00543: ReduceLROnPlateau reducing learning rate to 5.551114999048691e-19. 
Epoch 544/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 545/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 546/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 547/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 548/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 549/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 550/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 551/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 552/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 553/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00553: ReduceLROnPlateau reducing learning rate to 2.7755574995243454e-19. 
Epoch 554/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 555/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 556/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 557/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 558/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 559/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 560/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 561/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 562/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 563/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00563: ReduceLROnPlateau reducing learning rate to 1.3877787497621727e-19. 
Epoch 564/2000 191/191 [==============================] - 0s 303us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 565/2000 191/191 [==============================] - 0s 288us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 566/2000 191/191 [==============================] - 0s 194us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 567/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 568/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 569/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 570/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 571/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 572/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 573/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00573: ReduceLROnPlateau reducing learning rate to 6.938893748810864e-20. 
Epoch 574/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 575/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 576/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 577/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 578/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 579/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 580/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 581/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 582/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 583/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00583: ReduceLROnPlateau reducing learning rate to 3.469446874405432e-20. 
Epoch 584/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 585/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 586/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 587/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 588/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 589/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 590/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 591/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 592/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 593/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00593: ReduceLROnPlateau reducing learning rate to 1.734723437202716e-20. 
Epoch 594/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 595/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 596/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 597/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 598/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 599/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 600/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 601/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 602/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 603/2000 191/191 [==============================] - 0s 173us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00603: ReduceLROnPlateau reducing learning rate to 8.67361718601358e-21. 
Epoch 604/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 605/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 606/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 607/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 608/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 609/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 610/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 611/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 612/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 613/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00613: ReduceLROnPlateau reducing learning rate to 4.33680859300679e-21. 
Epoch 614/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 615/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 616/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 617/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 618/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 619/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 620/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 621/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 622/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 623/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00623: ReduceLROnPlateau reducing learning rate to 2.168404296503395e-21. 
Epoch 624/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 625/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 626/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 627/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 628/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 629/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 630/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 631/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 632/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 633/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00633: ReduceLROnPlateau reducing learning rate to 1.0842021482516974e-21. 
Epoch 634/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 635/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 636/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 637/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 638/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 639/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 640/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 641/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 642/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 643/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00643: ReduceLROnPlateau reducing learning rate to 5.421010741258487e-22. 
Epoch 644/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 645/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 646/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 647/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 648/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 649/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 650/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 651/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 652/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 653/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00653: ReduceLROnPlateau reducing learning rate to 2.7105053706292436e-22. 
Epoch 654/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 655/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 656/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 657/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 658/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 659/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 660/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 661/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 662/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 663/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00663: ReduceLROnPlateau reducing learning rate to 1.3552526853146218e-22. 
Epoch 664/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 665/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 666/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 667/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 668/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 669/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 670/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 671/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 672/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 673/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00673: ReduceLROnPlateau reducing learning rate to 6.776263426573109e-23. 
Epoch 674/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 675/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 676/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 677/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 678/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 679/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 680/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 681/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 682/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 683/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00683: ReduceLROnPlateau reducing learning rate to 3.3881317132865545e-23. 
Epoch 684/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 685/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 686/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 687/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 688/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 689/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 690/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 691/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 692/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 693/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00693: ReduceLROnPlateau reducing learning rate to 1.6940658566432772e-23. 
Epoch 694/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 695/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 696/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 697/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 698/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 699/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 700/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 701/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 702/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 703/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00703: ReduceLROnPlateau reducing learning rate to 8.470329283216386e-24. 
Epoch 704/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 705/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 706/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 707/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 708/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 709/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 710/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 711/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 712/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 713/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00713: ReduceLROnPlateau reducing learning rate to 4.235164641608193e-24. 
Epoch 714/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 715/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 716/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 717/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 718/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 719/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 720/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 721/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 722/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 723/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00723: ReduceLROnPlateau reducing learning rate to 2.1175823208040965e-24. 
Epoch 724/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 725/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 726/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 727/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 728/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 729/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 730/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 731/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 732/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 733/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00733: ReduceLROnPlateau reducing learning rate to 1.0587911604020483e-24. 
Epoch 734/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 735/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 736/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 737/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 738/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 739/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 740/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 741/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 742/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 743/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00743: ReduceLROnPlateau reducing learning rate to 5.293955802010241e-25. 
Epoch 744/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 745/2000 191/191 [==============================] - ETA: 0s - loss: 0.0317 - accuracy: 1.00 - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 746/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 747/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 748/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 749/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 750/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 751/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 752/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 753/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00753: ReduceLROnPlateau reducing learning rate to 2.6469779010051207e-25. 
Epoch 754/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 755/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 756/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 757/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 758/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 759/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 760/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 761/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 762/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 763/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00763: ReduceLROnPlateau reducing learning rate to 1.3234889505025603e-25. 
Epoch 764/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 765/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 766/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 767/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 768/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 769/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 770/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 771/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 772/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 773/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00773: ReduceLROnPlateau reducing learning rate to 6.617444752512802e-26. 
Epoch 774/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 775/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 776/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 777/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 778/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 779/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 780/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 781/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 782/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 783/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00783: ReduceLROnPlateau reducing learning rate to 3.308722376256401e-26. 
Epoch 784/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 785/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 786/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 787/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 788/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 789/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 790/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 791/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 792/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 793/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00793: ReduceLROnPlateau reducing learning rate to 1.6543611881282004e-26. 
Epoch 794/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 795/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 796/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 797/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 798/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 799/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 800/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 801/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 802/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 803/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00803: ReduceLROnPlateau reducing learning rate to 8.271805940641002e-27. 
Epoch 804/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 805/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 806/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 807/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 808/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 809/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 810/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 811/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 812/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 813/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00813: ReduceLROnPlateau reducing learning rate to 4.135902970320501e-27. 
Epoch 814/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 815/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 816/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 817/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 818/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 819/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 820/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 821/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 822/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 823/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00823: ReduceLROnPlateau reducing learning rate to 2.0679514851602505e-27. 
Epoch 824/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 825/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 826/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 827/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 828/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 829/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 830/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 831/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 832/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 833/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00833: ReduceLROnPlateau reducing learning rate to 1.0339757425801253e-27. 
Epoch 834/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 835/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 836/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 837/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 838/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 839/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 840/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 841/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 842/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 843/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00843: ReduceLROnPlateau reducing learning rate to 5.169878712900626e-28. 
Epoch 844/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 845/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 846/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 847/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 848/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 849/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 850/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 851/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 852/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 853/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00853: ReduceLROnPlateau reducing learning rate to 2.584939356450313e-28. 
Epoch 854/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 855/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 856/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 857/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 858/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 859/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 860/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 861/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 862/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 863/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00863: ReduceLROnPlateau reducing learning rate to 1.2924696782251566e-28. 
Epoch 864/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 865/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 866/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 867/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 868/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 869/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 870/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 871/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 872/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 873/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00873: ReduceLROnPlateau reducing learning rate to 6.462348391125783e-29. 
Epoch 874/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 875/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 876/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 877/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 878/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 879/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 880/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 881/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 882/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 883/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00883: ReduceLROnPlateau reducing learning rate to 3.2311741955628914e-29. 
Epoch 884/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 885/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 886/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 887/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 888/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 889/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 890/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 891/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 892/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 893/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00893: ReduceLROnPlateau reducing learning rate to 1.6155870977814457e-29. 
Epoch 894/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 895/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 896/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 897/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 898/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 899/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 900/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 901/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 902/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 903/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00903: ReduceLROnPlateau reducing learning rate to 8.077935488907229e-30. 
Epoch 904/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 905/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 906/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 907/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 908/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 909/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 910/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 911/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 912/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 913/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00913: ReduceLROnPlateau reducing learning rate to 4.038967744453614e-30. 
Epoch 914/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 915/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 916/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 917/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 918/2000 191/191 [==============================] - ETA: 0s - loss: 0.0296 - accuracy: 1.00 - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 919/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 920/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 921/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 922/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 923/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00923: ReduceLROnPlateau reducing learning rate to 2.019483872226807e-30. 
Epoch 924/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 925/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 926/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 927/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 928/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 929/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 930/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 931/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 932/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 933/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00933: ReduceLROnPlateau reducing learning rate to 1.0097419361134036e-30. 
Epoch 934/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 935/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 936/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 937/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 938/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 939/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 940/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 941/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 942/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 943/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00943: ReduceLROnPlateau reducing learning rate to 5.048709680567018e-31. 
Epoch 944/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 945/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 946/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 947/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 948/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 949/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 950/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 951/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 952/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 953/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00953: ReduceLROnPlateau reducing learning rate to 2.524354840283509e-31. 
Epoch 954/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 955/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 956/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 957/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 958/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 959/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 960/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 961/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 962/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 963/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00963: ReduceLROnPlateau reducing learning rate to 1.2621774201417545e-31. 
Epoch 964/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 965/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 966/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 967/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 968/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 969/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 970/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 971/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 972/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 973/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00973: ReduceLROnPlateau reducing learning rate to 6.310887100708772e-32. 
Epoch 974/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 975/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 976/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 977/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 978/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 979/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 980/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 981/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 982/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 983/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00983: ReduceLROnPlateau reducing learning rate to 3.155443550354386e-32. 
Epoch 984/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 985/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 986/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 987/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 988/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 989/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 990/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 991/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 992/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 993/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00993: ReduceLROnPlateau reducing learning rate to 1.577721775177193e-32. 
Epoch 994/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 995/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 996/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 997/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 998/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 999/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1000/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1001/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1002/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1003/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01003: ReduceLROnPlateau reducing learning rate to 7.888608875885965e-33. 
Epoch 1004/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1005/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1006/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1007/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1008/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1009/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1010/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1011/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1012/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1013/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01013: ReduceLROnPlateau reducing learning rate to 3.944304437942983e-33. 
Epoch 1014/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1015/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1016/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1017/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1018/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1019/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1020/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1021/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1022/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1023/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01023: ReduceLROnPlateau reducing learning rate to 1.9721522189714914e-33. 
Epoch 1024/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1025/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1026/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1027/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1028/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1029/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1030/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1031/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1032/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1033/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01033: ReduceLROnPlateau reducing learning rate to 9.860761094857457e-34. 
Epoch 1034/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1035/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1036/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1037/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1038/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1039/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1040/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1041/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1042/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1043/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01043: ReduceLROnPlateau reducing learning rate to 4.930380547428728e-34. 
Epoch 1044/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1045/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1046/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1047/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1048/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1049/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1050/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1051/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1052/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1053/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01053: ReduceLROnPlateau reducing learning rate to 2.465190273714364e-34. 
Epoch 1054/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1055/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1056/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1057/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1058/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1059/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1060/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1061/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1062/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1063/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01063: ReduceLROnPlateau reducing learning rate to 1.232595136857182e-34. 
Epoch 1064/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1065/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1066/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1067/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1068/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1069/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1070/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1071/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1072/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1073/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01073: ReduceLROnPlateau reducing learning rate to 6.16297568428591e-35. 
Epoch 1074/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1075/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1076/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1077/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1078/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1079/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1080/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1081/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1082/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1083/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01083: ReduceLROnPlateau reducing learning rate to 3.081487842142955e-35. 
Epoch 1084/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1085/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1086/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1087/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1088/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1089/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1090/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1091/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1092/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1093/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01093: ReduceLROnPlateau reducing learning rate to 1.5407439210714776e-35. 
Epoch 1094/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1095/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1096/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1097/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1098/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1099/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1100/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1101/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1102/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1103/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01103: ReduceLROnPlateau reducing learning rate to 7.703719605357388e-36. 
Epoch 1104/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1105/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1106/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1107/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1108/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1109/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1110/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1111/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1112/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1113/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01113: ReduceLROnPlateau reducing learning rate to 3.851859802678694e-36. 
Epoch 1114/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1115/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1116/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1117/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1118/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1119/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1120/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1121/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1122/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1123/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01123: ReduceLROnPlateau reducing learning rate to 1.925929901339347e-36. 
Epoch 1124/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1125/2000 191/191 [==============================] - ETA: 0s - loss: 0.0433 - accuracy: 1.00 - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1126/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1127/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1128/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1129/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1130/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1131/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1132/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1133/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01133: ReduceLROnPlateau reducing learning rate to 9.629649506696735e-37. 
Epoch 1134/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1135/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1136/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1137/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1138/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1139/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1140/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1141/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1142/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1143/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01143: ReduceLROnPlateau reducing learning rate to 4.8148247533483676e-37. 
Epoch 1144/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1145/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1146/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1147/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1148/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1149/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1150/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1151/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1152/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1153/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01153: ReduceLROnPlateau reducing learning rate to 2.4074123766741838e-37. 
Epoch 1154/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1155/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1156/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1157/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1158/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1159/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1160/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1161/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1162/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1163/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01163: ReduceLROnPlateau reducing learning rate to 1.2037061883370919e-37. 
Epoch 1164/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1165/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1166/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1167/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1168/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1169/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1170/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1171/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1172/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1173/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01173: ReduceLROnPlateau reducing learning rate to 6.018530941685459e-38. 
Epoch 1174/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1175/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1176/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1177/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1178/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1179/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1180/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1181/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1182/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1183/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01183: ReduceLROnPlateau reducing learning rate to 3.0092654708427297e-38. 
Epoch 1184/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1185/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1186/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1187/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1188/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1189/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1190/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1191/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1192/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1193/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01193: ReduceLROnPlateau reducing learning rate to 1.5046327354213649e-38. 
Epoch 1194/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1195/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1196/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1197/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1198/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1199/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1200/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1201/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1202/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1203/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01203: ReduceLROnPlateau reducing learning rate to 7.523163677106824e-39. 
Epoch 1204/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1205/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1206/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1207/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1208/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1209/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1210/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1211/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1212/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1213/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01213: ReduceLROnPlateau reducing learning rate to 3.761581838553412e-39. 
Epoch 1214/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1215/2000 191/191 [==============================] - ETA: 0s - loss: 0.0311 - accuracy: 1.00 - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1216/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1217/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1218/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1219/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1220/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1221/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1222/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1223/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01223: ReduceLROnPlateau reducing learning rate to 1.88079056895209e-39. 
Epoch 1224/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1225/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1226/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1227/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1228/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1229/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1230/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1231/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1232/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1233/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01233: ReduceLROnPlateau reducing learning rate to 9.40395284476045e-40. 
Epoch 1234/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1235/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1236/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1237/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1238/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1239/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1240/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1241/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1242/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1243/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01243: ReduceLROnPlateau reducing learning rate to 4.701972919134064e-40. 
Epoch 1244/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1245/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1246/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1247/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1248/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1249/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1250/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1251/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1252/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1253/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01253: ReduceLROnPlateau reducing learning rate to 2.350986459567032e-40. 
Epoch 1254/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1255/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1256/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1257/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1258/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1259/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1260/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1261/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1262/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1263/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01263: ReduceLROnPlateau reducing learning rate to 1.175493229783516e-40. 
Epoch 1264/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1265/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1266/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1267/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1268/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1269/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1270/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1271/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1272/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1273/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01273: ReduceLROnPlateau reducing learning rate to 5.87746614891758e-41. 
Epoch 1274/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1275/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1276/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1277/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1278/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1279/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1280/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1281/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1282/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1283/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01283: ReduceLROnPlateau reducing learning rate to 2.93873307445879e-41. 
Epoch 1284/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1285/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1286/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1287/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1288/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1289/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1290/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1291/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1292/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1293/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01293: ReduceLROnPlateau reducing learning rate to 1.4694015696910032e-41. 
Epoch 1294/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1295/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1296/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1297/2000 191/191 [==============================] - ETA: 0s - loss: 0.0313 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1298/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1299/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1300/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1301/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1302/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1303/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01303: ReduceLROnPlateau reducing learning rate to 7.347007848455016e-42. 
Epoch 1304/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1305/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1306/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1307/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1308/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1309/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1310/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1311/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1312/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1313/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01313: ReduceLROnPlateau reducing learning rate to 3.673503924227508e-42. 
Epoch 1314/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1315/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1316/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1317/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1318/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1319/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1320/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1321/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1322/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1323/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01323: ReduceLROnPlateau reducing learning rate to 1.8371022867298352e-42. 
Epoch 1324/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1325/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1326/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1327/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1328/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1329/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1330/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1331/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1332/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1333/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01333: ReduceLROnPlateau reducing learning rate to 9.185511433649176e-43. 
Epoch 1334/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1335/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1336/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1337/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1338/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1339/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1340/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1341/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1342/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1343/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01343: ReduceLROnPlateau reducing learning rate to 4.5962589629854e-43. 
Epoch 1344/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1345/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1346/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1347/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1348/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1349/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1350/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1351/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1352/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1353/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01353: ReduceLROnPlateau reducing learning rate to 2.2981294814927e-43. 
Epoch 1354/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1355/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1356/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1357/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1358/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1359/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1360/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1361/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1362/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1363/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01363: ReduceLROnPlateau reducing learning rate to 1.14906474074635e-43. 
Epoch 1364/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1365/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1366/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1367/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1368/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1369/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1370/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1371/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1372/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1373/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01373: ReduceLROnPlateau reducing learning rate to 5.74532370373175e-44. 
Epoch 1374/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1375/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1376/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1377/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1378/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1379/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1380/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1381/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1382/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1383/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01383: ReduceLROnPlateau reducing learning rate to 2.872661851865875e-44. 
Epoch 1384/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1385/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1386/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1387/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1388/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1389/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1390/2000 191/191 [==============================] - ETA: 0s - loss: 0.0479 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1391/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1392/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1393/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01393: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-44. 
Epoch 1394/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1395/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1396/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1397/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1398/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1399/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1400/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1401/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1402/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1403/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01403: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-45. 
Epoch 1404/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1405/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1406/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1407/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1408/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1409/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1410/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1411/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1412/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1413/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01413: ReduceLROnPlateau reducing learning rate to 3.5032461608120427e-45. 
Epoch 1414/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1415/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1416/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1417/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1418/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1419/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1420/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1421/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1422/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1423/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01423: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-45. 
Epoch 1424/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1425/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1426/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1427/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1428/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1429/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1430/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1431/2000 191/191 [==============================] - ETA: 0s - loss: 0.0420 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1432/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1433/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01433: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-46. 
Epoch 1434/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1435/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1436/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1437/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1438/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1439/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1440/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1441/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1442/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1443/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1444/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1445/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1446/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1447/2000 191/191 [==============================] - 0s 
115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1448/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1449/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1450/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1451/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1452/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1453/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1454/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1455/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1456/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1457/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1458/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1459/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1460/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 
- val_accuracy: 0.7656 Epoch 1461/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1462/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1463/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1464/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1465/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1466/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1467/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1468/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1469/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1470/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1471/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1472/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1473/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1474/2000 191/191 
[==============================] - ETA: 0s - loss: 0.0491 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1475/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1476/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1477/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1478/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1479/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1480/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1481/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1482/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1483/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1484/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1485/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1486/2000 191/191 [==============================] - 0s 147us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1487/2000 191/191 
[==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1488/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1489/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1490/2000 191/191 [==============================] - 0s 114us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1491/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1492/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1493/2000 191/191 [==============================] - 0s 109us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1494/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1495/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1496/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1497/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1498/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1499/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1500/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1501/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1502/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1503/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1504/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1505/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1506/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1507/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1508/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1509/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1510/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1511/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1512/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1513/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1514/2000 
191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1515/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1516/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1517/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1518/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1519/2000 191/191 [==============================] - 0s 95us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1520/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1521/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1522/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1523/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1524/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1525/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1526/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1527/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 
- accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1528/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1529/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1530/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1531/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1532/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1533/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1534/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1535/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1536/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1537/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1538/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1539/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1540/2000 191/191 [==============================] - ETA: 0s - loss: 0.0384 - accuracy: 1.00 - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - 
val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1541/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1542/2000 191/191 [==============================] - 0s 188us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1543/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1544/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1545/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1546/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1547/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1548/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1549/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1550/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1551/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1552/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1553/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1554/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1555/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1556/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1557/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1558/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1559/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1560/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1561/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1562/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1563/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1564/2000 191/191 [==============================] - ETA: 0s - loss: 0.0228 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1565/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1566/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1567/2000 191/191 [==============================] 
- 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1568/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1569/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1570/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1571/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1572/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1573/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1574/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1575/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1576/2000 191/191 [==============================] - 0s 111us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1577/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1578/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1579/2000 191/191 [==============================] - 0s 102us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1580/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 
- val_accuracy: 0.7656 Epoch 1581/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1582/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1583/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1584/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1585/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1586/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1587/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1588/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1589/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1590/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1591/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1592/2000 191/191 [==============================] - 0s 101us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1593/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1594/2000 191/191 
[==============================] - 0s 112us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1595/2000 191/191 [==============================] - 0s 106us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1596/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1597/2000 191/191 [==============================] - ETA: 0s - loss: 0.0252 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1598/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1599/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1600/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1601/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1602/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1603/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1604/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1605/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1606/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1607/2000 191/191 
[==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1608/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1609/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1610/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1611/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1612/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1613/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1614/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1615/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1616/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1617/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1618/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1619/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1620/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1621/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1622/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1623/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1624/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1625/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1626/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1627/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1628/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1629/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1630/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1631/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1632/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1633/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 
1634/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1635/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1636/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1637/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1638/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1639/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1640/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1641/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1642/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1643/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1644/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1645/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1646/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1647/2000 191/191 [==============================] - 0s 99us/step - 
loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1648/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1649/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1650/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1651/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1652/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1653/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1654/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1655/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1656/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1657/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1658/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1659/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1660/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 
0.7656 Epoch 1661/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1662/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1663/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1664/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1665/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1666/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1667/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1668/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1669/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1670/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1671/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1672/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1673/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1674/2000 191/191 [==============================] - 0s 
94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1675/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1676/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1677/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1678/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1679/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1680/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1681/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1682/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1683/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1684/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1685/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1686/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1687/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - 
val_accuracy: 0.7656 Epoch 1688/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1689/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1690/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1691/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1692/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1693/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1694/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1695/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1696/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1697/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1698/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1699/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1700/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1701/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1702/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1703/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1704/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1705/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1706/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1707/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1708/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1709/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1710/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1711/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1712/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1713/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1714/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1715/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1716/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1717/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1718/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1719/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1720/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1721/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1722/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1723/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1724/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1725/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1726/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1727/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1728/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1729/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1730/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1731/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1732/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1733/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1734/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1735/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1736/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1737/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1738/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1739/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1740/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1741/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1742/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1743/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1744/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1745/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1746/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1747/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1748/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1749/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1750/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1751/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1752/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1753/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1754/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1755/2000 191/191 
[==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1756/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1757/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1758/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1759/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1760/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1761/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1762/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1763/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1764/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1765/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1766/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1767/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1768/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1769/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1770/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1771/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1772/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1773/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1774/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1775/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1776/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1777/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1778/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1779/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1780/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1781/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1782/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1783/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1784/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1785/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1786/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1787/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1788/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1789/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1790/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1791/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1792/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1793/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1794/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1795/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1796/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1797/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1798/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1799/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1800/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1801/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1802/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1803/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1804/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1805/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1806/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1807/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1808/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1809/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1810/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1811/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1812/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1813/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1814/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1815/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1816/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1817/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1818/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1819/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1820/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1821/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1822/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1823/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1824/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1825/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1826/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1827/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1828/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1829/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1830/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1831/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1832/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1833/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1834/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1835/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1836/2000 191/191 
[==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1837/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1838/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1839/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1840/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1841/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1842/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1843/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1844/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1845/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1846/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1847/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1848/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1849/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1850/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1851/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1852/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1853/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1854/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1855/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1856/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1857/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1858/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1859/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1860/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1861/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1862/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1863/2000 191/191 
[==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1864/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1865/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1866/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1867/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1868/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1869/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1870/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1871/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1872/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1873/2000 191/191 [==============================] - ETA: 0s - loss: 0.0489 - accuracy: 1.00 - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1874/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1875/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1876/2000 191/191 [==============================] 
- 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1877/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1878/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1879/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1880/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1881/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1882/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1883/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1884/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1885/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1886/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1887/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1888/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1889/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - 
val_accuracy: 0.7656 Epoch 1890/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1891/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1892/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1893/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1894/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1895/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1896/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1897/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1898/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1899/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1900/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1901/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1902/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1903/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1904/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1905/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1906/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1907/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1908/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1909/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1910/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1911/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1912/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1913/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1914/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1915/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1916/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1917/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1918/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1919/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1920/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1921/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1922/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1923/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1924/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1925/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1926/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1927/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1928/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1929/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1930/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1931/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1932/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1933/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1934/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1935/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1936/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1937/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1938/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1939/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1940/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1941/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1942/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1943/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1944/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1945/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1946/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1947/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1948/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1949/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1950/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1951/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1952/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1953/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1954/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1955/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1956/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1957/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1958/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1959/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1960/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1961/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1962/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1963/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1964/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1965/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1966/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1967/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1968/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1969/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1970/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1971/2000 191/191 [==============================] - ETA: 0s - loss: 0.0400 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1972/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1973/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1974/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1975/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1976/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1977/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1978/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1979/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1980/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1981/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1982/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1983/2000 191/191 [==============================] - ETA: 0s - loss: 0.0319 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1984/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1985/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1986/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1987/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1988/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1989/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1990/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1991/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1992/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1993/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1994/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1995/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1996/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1997/2000 
191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1998/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1999/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 2000/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656
# Per-epoch curves recorded by Keras in the History object returned by fit().
hist = history.history
acc, val_acc = hist['accuracy'], hist['val_accuracy']
loss, val_loss = hist['loss'], hist['val_loss']
epochs = range(len(acc))
print(epochs)
# Accuracy: training as blue dots ('bo'), validation as a solid blue line ('b').
for curve, fmt, lbl in ((acc, 'bo', 'Training acc'), (val_acc, 'b', 'Validation acc')):
    plt.plot(epochs, curve, fmt, label=lbl)
plt.title('Training and validation accuracy')
plt.legend()
plt.show()
# Loss: same plotting convention as above.
for curve, fmt, lbl in ((loss, 'bo', 'Training loss'), (val_loss, 'b', 'Validation loss')):
    plt.plot(epochs, curve, fmt, label=lbl)
plt.title('Training and validation loss')
plt.legend()
plt.show()
range(0, 2000)
# Evaluate the trained network on the held-out test split and report both metrics.
test_metrics = model.evaluate(X_test, y_test)
test_loss, test_acc = test_metrics
print("test loss: {}, test accuracy: {}".format(*test_metrics))
64/64 [==============================] - 0s 47us/step test loss: 0.8193266093730927, test accuracy: 0.765625
# Continuous scores for the test set, fed directly to the AUC computation
# (assumes outputs lie in [0, 1] — consistent with the 0.5 thresholding
# applied right after this cell; TODO confirm the output activation).
y_pred = model.predict(X_test)
print("AUC ROC: ",roc_auc_score(y_test, y_pred))
AUC ROC: 0.7282608695652174
# Threshold the continuous scores at 0.5 to obtain hard 0/1 class labels.
# Idiom fix: the original list(map(lambda i: int(i>=0.5), ...)) is replaced
# by the equivalent, more readable list comprehension.
y_pred = [int(score >= 0.5) for score in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa: 0.366754617414248
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.339415 | 0.847773 | 0.497198 | -0.389310 | 1.225458 | 1.947033 | -0.736267 | 0.492219 | 0.576682 | 1.504697 | -1.796460 | 0.724954 | 0.958600 |
| 1 | 0.587658 | -1.195426 | 0.636375 | 0.199876 | 0.765321 | 0.061181 | 0.379367 | -0.440867 | 0.232893 | 1.339920 | 0.110001 | 0.807525 | 0.815678 |
| 2 | 1.465595 | -2.307943 | 0.354567 | -0.058273 | -1.298853 | -0.811453 | -1.551580 | -3.934320 | -1.079432 | 2.546130 | 1.421407 | 0.639359 | 0.199094 |
| 3 | 0.749403 | -1.690498 | -0.125200 | -1.016135 | 0.825845 | 0.271444 | -0.104786 | -0.992141 | 0.049182 | 1.425948 | -0.343269 | -0.789558 | -0.411898 |
| 4 | -0.280577 | 0.393332 | 0.744917 | 2.411400 | -0.777421 | -0.420018 | 1.258355 | -1.544565 | -0.498071 | 0.421527 | -0.632908 | -0.056846 | -0.072348 |
| 5 | -0.158690 | 0.404891 | -0.147920 | -0.299241 | -0.786974 | 0.697216 | 0.290501 | 0.019739 | -1.468086 | -0.346174 | -0.086965 | 0.026492 | 1.019512 |
| 6 | 1.646777 | 0.772744 | -1.425228 | -0.562610 | -1.556076 | 0.533289 | -0.404271 | 1.676958 | 0.979516 | 0.415548 | 0.544719 | 0.433332 | 0.204271 |
| 7 | 1.124970 | 0.506236 | 0.738993 | 1.984485 | -0.928706 | -0.494097 | -0.707105 | -0.494778 | -1.642929 | 0.207467 | 0.181382 | 2.431721 | 0.848697 |
| 8 | 0.920059 | 1.438862 | -2.048354 | 1.503567 | -2.801303 | 0.567132 | -0.745441 | 0.569519 | 0.130917 | 1.965436 | -0.034797 | 1.164878 | 0.074074 |
| 9 | 0.182544 | 0.310622 | 0.067722 | 0.870138 | 0.168366 | 0.682045 | -0.191296 | -0.144962 | -0.630020 | -0.284032 | -0.315301 | 0.344841 | 0.495167 |
| 10 | 0.168663 | 0.389450 | 0.034360 | 1.213392 | 0.248437 | 0.870618 | -0.460824 | -0.174734 | -0.710502 | -0.228408 | -0.265153 | 0.349416 | 0.584114 |
| 11 | 0.153010 | -0.118336 | 0.639531 | 1.504522 | 0.937909 | 0.356048 | -0.089987 | -0.628522 | 0.064203 | 0.966049 | 0.403915 | -0.943626 | 0.173874 |
| 12 | 0.132578 | 0.261966 | -2.871493 | -3.398160 | -0.256458 | 1.596532 | -0.358711 | 0.175955 | -0.499075 | 0.949085 | 2.235525 | -0.197712 | -0.272366 |
| 13 | 1.094629 | 0.885150 | -1.130672 | -0.083270 | 0.672482 | 0.750453 | -0.863949 | 0.140540 | 0.423312 | -0.305155 | -0.424905 | 0.318660 | 0.885900 |
| 14 | 0.771472 | 0.364448 | -0.454696 | 0.434253 | 0.912699 | 0.745924 | -0.073390 | -0.406473 | 0.450765 | 0.323180 | -0.458826 | -0.132295 | 0.495454 |
| 15 | 0.677561 | 0.166795 | 0.746471 | 0.075191 | 0.867924 | -1.621678 | 0.771146 | -0.067286 | 0.557998 | -0.093593 | 0.020233 | -0.800013 | -0.629188 |
| 16 | -0.032353 | 1.227345 | -0.188580 | 0.927210 | 0.016663 | 1.001867 | -0.473811 | 0.782387 | 1.542760 | -0.345478 | -0.838104 | -0.439443 | 1.179204 |
| 17 | 0.459031 | 1.258961 | -0.329412 | 1.391790 | -0.208888 | 1.059241 | -1.245671 | 0.619153 | 0.245780 | 0.644548 | -0.602629 | -0.928581 | 0.739885 |
| 18 | -0.359172 | 0.051214 | -0.603962 | 0.778896 | 1.630471 | 1.802477 | 1.486205 | -0.140738 | -0.894366 | 0.736624 | 2.114721 | 1.078175 | -0.965785 |
| 19 | 0.209859 | -0.615399 | -0.676895 | 0.735655 | 0.805509 | -0.696793 | 1.073068 | 0.240429 | -0.205934 | -0.759693 | 0.672843 | 0.569482 | -0.455391 |
| 20 | 0.127381 | -0.265099 | -0.258801 | -0.127568 | 0.649447 | 0.244473 | 1.897421 | -0.344616 | -0.593159 | 0.065147 | 1.787607 | 1.219355 | -0.171813 |
| 21 | 1.222717 | 0.409860 | 1.311826 | 0.703873 | 0.322062 | 0.305461 | -0.522644 | -0.750833 | 0.001767 | 0.017953 | 0.254329 | -0.227762 | -0.614790 |
| 22 | 1.173352 | 0.490500 | 0.742825 | -0.028159 | -0.272396 | -0.502733 | -0.759443 | -1.031924 | -0.157975 | 0.075659 | 0.604220 | 0.143298 | -0.001849 |
| 23 | 1.069960 | 0.858822 | -0.795544 | 0.076688 | 0.851875 | 0.735014 | -0.758779 | 0.065595 | 0.532667 | -0.391858 | -0.497019 | 0.240822 | 0.848126 |
| 24 | 0.581377 | -0.804045 | 0.399887 | 1.535671 | 0.245878 | 0.904192 | -0.233991 | -0.925983 | 0.212280 | 0.499535 | -0.024926 | -0.925999 | 1.294925 |
| 25 | 0.161110 | 0.025075 | 0.716318 | 1.532230 | 0.889883 | 0.353167 | -0.058787 | -0.593046 | 0.093773 | 0.927085 | 0.199691 | -0.979872 | 0.232850 |
| 26 | 0.431443 | 0.442713 | 0.259120 | 0.045533 | 0.102675 | 0.367606 | 0.054320 | 0.942924 | 0.180609 | 0.550983 | 0.265291 | 0.321252 | -0.830969 |
| 27 | 0.344525 | -1.140315 | -0.725453 | -0.547965 | 0.449924 | 0.303904 | 1.053624 | 1.051712 | 0.509322 | 0.181611 | -0.519979 | -1.134490 | -1.439105 |
| 28 | -0.041565 | 0.671274 | 0.195143 | 0.247294 | 0.531620 | 1.050124 | 0.311358 | 0.988161 | -0.198869 | 0.387795 | 1.757366 | 1.351684 | 0.194840 |
| 29 | 0.417845 | -1.134173 | -0.760709 | -0.605264 | 0.077464 | 0.533333 | 1.104524 | 2.124971 | 0.083548 | 0.801730 | 0.092534 | -1.281628 | -1.468782 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 225 | 1.532114 | -1.060006 | -0.434145 | -0.999435 | -1.259462 | 0.039140 | -0.802013 | -0.655286 | 0.714448 | 1.005958 | -0.086372 | 0.537392 | 0.054440 |
| 226 | -0.942320 | 1.172080 | 0.506725 | -0.230675 | -0.104635 | 0.898742 | -1.107001 | -1.182148 | -0.940991 | 0.232366 | 1.778224 | 0.975251 | 1.731084 |
| 227 | 1.421974 | 0.631029 | -0.563813 | -0.694595 | -0.673270 | 0.929022 | 0.476907 | -1.025173 | -0.813644 | -0.060006 | -0.738730 | -0.558099 | 0.057654 |
| 228 | -1.473385 | -0.806223 | 1.849423 | -1.252541 | 0.941013 | -0.872947 | -1.812392 | -0.242718 | -0.097212 | -0.510500 | -0.232195 | -0.546399 | 0.945530 |
| 229 | -1.135926 | -0.772372 | 1.164844 | -1.022517 | 0.630202 | -0.496999 | -1.101656 | -0.168921 | -0.295159 | -0.587401 | 0.369033 | -0.266325 | 0.604469 |
| 230 | -1.085049 | 0.879566 | 0.442593 | 0.128917 | 0.393498 | 0.531555 | 0.392194 | 1.418515 | 0.891015 | -0.348926 | -0.756201 | -0.838584 | -0.015971 |
| 231 | -0.352258 | 0.556982 | 0.530520 | 0.443818 | 0.300921 | 0.032128 | -0.797384 | -0.573532 | 0.398084 | 0.328875 | -0.274964 | -1.300920 | 0.254456 |
| 232 | -1.190363 | 0.797356 | 0.758472 | 0.587917 | 0.890540 | 0.471925 | 0.105793 | 0.680721 | 0.230834 | -0.150709 | -0.816744 | -0.470618 | 0.371198 |
| 233 | -0.651003 | -0.586618 | 1.326854 | -0.451354 | 0.507113 | 0.165474 | -0.919675 | -0.448249 | -1.310940 | -1.372737 | 0.406029 | -1.414627 | -0.434858 |
| 234 | -1.459511 | -0.516281 | 1.631699 | -1.141842 | 0.584621 | -0.458541 | -1.428877 | -0.934556 | -0.216455 | -0.049794 | 0.095580 | 0.387068 | 0.693730 |
| 235 | -0.726984 | 0.702447 | 0.798069 | -0.320660 | 0.530902 | 1.019988 | 0.144995 | 0.207847 | 0.039592 | 0.220761 | 0.762941 | 0.575034 | 0.671517 |
| 236 | -0.300986 | -0.404923 | 0.715406 | 0.245380 | -0.427936 | -0.334843 | -0.228084 | -0.330898 | -0.674327 | 0.199560 | 0.827455 | 0.016433 | 0.866789 |
| 237 | -0.736244 | 0.088611 | 0.910051 | 0.437100 | 0.258256 | 0.363828 | -0.415290 | -0.717445 | -0.012727 | 0.436925 | -0.786954 | -1.217376 | 0.352825 |
| 238 | 0.610473 | -2.664315 | 1.303652 | -2.022376 | 1.500032 | -1.280926 | -1.249533 | 0.432111 | -0.768558 | 0.291156 | -0.092312 | 0.053770 | -0.401166 |
| 239 | -2.045424 | -2.954642 | 0.302601 | -0.868092 | -1.038134 | -1.230777 | 0.514329 | 0.057591 | -1.023895 | 0.275395 | -1.450282 | 0.386242 | 0.318763 |
| 240 | 0.329793 | -1.367570 | -1.454329 | -0.207924 | -0.723609 | -0.149025 | -0.085298 | -0.011595 | -0.240239 | -0.009120 | -0.325229 | -0.025722 | 0.114182 |
| 241 | -1.919591 | 1.382172 | -0.134161 | 0.837967 | -0.687780 | 0.944303 | -0.258652 | -0.742178 | 0.386031 | -1.178099 | -1.843543 | -0.710556 | -0.318561 |
| 242 | -2.087669 | 1.400006 | -0.494964 | 0.451717 | -0.759188 | 0.736625 | 0.133121 | -0.196031 | 1.121231 | 0.474128 | -0.345937 | -0.409324 | -0.442069 |
| 243 | -2.131652 | 0.439305 | -0.612226 | 0.854126 | -0.494550 | 0.825299 | 0.301373 | -0.018964 | 0.690556 | -0.078762 | -0.709495 | -0.075857 | -0.418656 |
| 244 | -1.611989 | -0.756403 | -0.410917 | 1.075909 | 0.297336 | -1.317576 | 1.115011 | -0.467065 | -0.768378 | 1.615499 | 1.611125 | -1.018782 | -1.798744 |
| 245 | -0.142010 | 0.000190 | -0.063461 | -0.506353 | -0.386942 | -0.256144 | 0.270621 | -1.497417 | 0.507892 | 0.456828 | -0.431169 | -0.978417 | 0.015849 |
| 246 | -1.263975 | -1.168117 | -1.396090 | -0.312016 | 1.862268 | 1.400290 | 0.646060 | -0.686864 | 0.418524 | -0.069926 | -0.653856 | -0.853617 | -0.106814 |
| 247 | -0.507700 | 0.899825 | 1.510153 | 1.083642 | 2.081451 | 0.589016 | 0.901321 | 0.658808 | 0.152596 | 0.176442 | -0.447633 | 0.287838 | 0.650479 |
| 248 | -0.159768 | 0.518093 | 2.197018 | 0.698491 | 0.476336 | -2.014255 | -1.614667 | -0.397282 | -1.781932 | -0.208894 | 1.650551 | -0.771436 | -0.987237 |
| 249 | -1.037899 | 1.016712 | 2.774230 | 0.665468 | -0.385673 | 0.587263 | -0.121609 | -0.331379 | 0.622484 | -0.387131 | -0.276584 | 0.218207 | 1.689216 |
| 250 | -0.526923 | -1.169944 | 0.474875 | -0.789231 | 0.369827 | -0.537003 | -1.089843 | -0.173366 | -0.023237 | -0.142334 | 0.740065 | 0.813114 | 0.872556 |
| 251 | -0.770856 | -1.024349 | -0.019140 | -0.097521 | 0.092703 | 0.369242 | -0.273901 | 0.190740 | -0.074032 | 0.113055 | 0.140291 | -0.696275 | 0.166679 |
| 252 | -0.905458 | -0.790575 | 0.206164 | -0.723816 | -0.444860 | 0.107833 | -0.734514 | -0.533865 | -0.634334 | 0.320526 | 0.088428 | -0.348210 | 0.347201 |
| 253 | -1.378235 | -0.338405 | 0.016815 | -0.394563 | 0.034043 | 1.023865 | -0.303960 | -1.316121 | 0.198697 | 0.670577 | 0.809574 | 0.580565 | 0.056004 |
| 254 | -0.199959 | -2.035812 | -0.904507 | -1.511975 | -0.437843 | 0.262972 | -1.943788 | -1.963300 | -2.256227 | 0.354369 | -0.039829 | 0.882325 | 0.139307 |
255 rows × 13 columns
# Elbow method: fit k-means for k = 1..14 and collect the within-cluster
# sum of squares (inertia) for each k; display the list.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
[3315.0, 2972.7888695817974, 2748.18187155972, 2544.9420084212106, 2413.687059384553, 2278.037996783226, 2213.3487507256823, 2123.4282707474663, 2067.8299633414163, 1977.777252698108, 1956.5229777214513, 1880.0296166971755, 1815.5096049846275, 1785.9955747862728]
# Elbow plot of the inertias: choose k where the curve bends.
plt.figure(figsize=(12, 12))
ks = range(1, 15)
plt.plot(ks, WSSs)
[<matplotlib.lines.Line2D at 0x1e82ae84f98>]
# Number of clusters chosen from the elbow plot above.
K=6
# Fix: use the declared K instead of repeating the literal 6 (same value
# today, but keeps the two in sync if K is ever changed).
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=6, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
# NOTE(review): predicting on the same data the model was fitted on yields the
# same assignments as kmeans_mfcc.labels_ (the two printed arrays match);
# kept as predict() to mirror the original notebook flow.
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
# Append the k-means cluster id and the target flag as new columns of the
# feature table, then display the augmented frame.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.339415 | 0.847773 | 0.497198 | -0.389310 | 1.225458 | 1.947033 | -0.736267 | 0.492219 | 0.576682 | 1.504697 | -1.796460 | 0.724954 | 0.958600 | 4 | 0 |
| 1 | 0.587658 | -1.195426 | 0.636375 | 0.199876 | 0.765321 | 0.061181 | 0.379367 | -0.440867 | 0.232893 | 1.339920 | 0.110001 | 0.807525 | 0.815678 | 2 | 0 |
| 2 | 1.465595 | -2.307943 | 0.354567 | -0.058273 | -1.298853 | -0.811453 | -1.551580 | -3.934320 | -1.079432 | 2.546130 | 1.421407 | 0.639359 | 0.199094 | 2 | 0 |
| 3 | 0.749403 | -1.690498 | -0.125200 | -1.016135 | 0.825845 | 0.271444 | -0.104786 | -0.992141 | 0.049182 | 1.425948 | -0.343269 | -0.789558 | -0.411898 | 2 | 0 |
| 4 | -0.280577 | 0.393332 | 0.744917 | 2.411400 | -0.777421 | -0.420018 | 1.258355 | -1.544565 | -0.498071 | 0.421527 | -0.632908 | -0.056846 | -0.072348 | 0 | 0 |
| 5 | -0.158690 | 0.404891 | -0.147920 | -0.299241 | -0.786974 | 0.697216 | 0.290501 | 0.019739 | -1.468086 | -0.346174 | -0.086965 | 0.026492 | 1.019512 | 1 | 0 |
| 6 | 1.646777 | 0.772744 | -1.425228 | -0.562610 | -1.556076 | 0.533289 | -0.404271 | 1.676958 | 0.979516 | 0.415548 | 0.544719 | 0.433332 | 0.204271 | 1 | 0 |
| 7 | 1.124970 | 0.506236 | 0.738993 | 1.984485 | -0.928706 | -0.494097 | -0.707105 | -0.494778 | -1.642929 | 0.207467 | 0.181382 | 2.431721 | 0.848697 | 0 | 0 |
| 8 | 0.920059 | 1.438862 | -2.048354 | 1.503567 | -2.801303 | 0.567132 | -0.745441 | 0.569519 | 0.130917 | 1.965436 | -0.034797 | 1.164878 | 0.074074 | 1 | 0 |
| 9 | 0.182544 | 0.310622 | 0.067722 | 0.870138 | 0.168366 | 0.682045 | -0.191296 | -0.144962 | -0.630020 | -0.284032 | -0.315301 | 0.344841 | 0.495167 | 4 | 0 |
| 10 | 0.168663 | 0.389450 | 0.034360 | 1.213392 | 0.248437 | 0.870618 | -0.460824 | -0.174734 | -0.710502 | -0.228408 | -0.265153 | 0.349416 | 0.584114 | 4 | 0 |
| 11 | 0.153010 | -0.118336 | 0.639531 | 1.504522 | 0.937909 | 0.356048 | -0.089987 | -0.628522 | 0.064203 | 0.966049 | 0.403915 | -0.943626 | 0.173874 | 4 | 0 |
| 12 | 0.132578 | 0.261966 | -2.871493 | -3.398160 | -0.256458 | 1.596532 | -0.358711 | 0.175955 | -0.499075 | 0.949085 | 2.235525 | -0.197712 | -0.272366 | 1 | 0 |
| 13 | 1.094629 | 0.885150 | -1.130672 | -0.083270 | 0.672482 | 0.750453 | -0.863949 | 0.140540 | 0.423312 | -0.305155 | -0.424905 | 0.318660 | 0.885900 | 4 | 0 |
| 14 | 0.771472 | 0.364448 | -0.454696 | 0.434253 | 0.912699 | 0.745924 | -0.073390 | -0.406473 | 0.450765 | 0.323180 | -0.458826 | -0.132295 | 0.495454 | 4 | 0 |
| 15 | 0.677561 | 0.166795 | 0.746471 | 0.075191 | 0.867924 | -1.621678 | 0.771146 | -0.067286 | 0.557998 | -0.093593 | 0.020233 | -0.800013 | -0.629188 | 3 | 0 |
| 16 | -0.032353 | 1.227345 | -0.188580 | 0.927210 | 0.016663 | 1.001867 | -0.473811 | 0.782387 | 1.542760 | -0.345478 | -0.838104 | -0.439443 | 1.179204 | 4 | 0 |
| 17 | 0.459031 | 1.258961 | -0.329412 | 1.391790 | -0.208888 | 1.059241 | -1.245671 | 0.619153 | 0.245780 | 0.644548 | -0.602629 | -0.928581 | 0.739885 | 4 | 0 |
| 18 | -0.359172 | 0.051214 | -0.603962 | 0.778896 | 1.630471 | 1.802477 | 1.486205 | -0.140738 | -0.894366 | 0.736624 | 2.114721 | 1.078175 | -0.965785 | 4 | 0 |
| 19 | 0.209859 | -0.615399 | -0.676895 | 0.735655 | 0.805509 | -0.696793 | 1.073068 | 0.240429 | -0.205934 | -0.759693 | 0.672843 | 0.569482 | -0.455391 | 4 | 0 |
| 20 | 0.127381 | -0.265099 | -0.258801 | -0.127568 | 0.649447 | 0.244473 | 1.897421 | -0.344616 | -0.593159 | 0.065147 | 1.787607 | 1.219355 | -0.171813 | 4 | 0 |
| 21 | 1.222717 | 0.409860 | 1.311826 | 0.703873 | 0.322062 | 0.305461 | -0.522644 | -0.750833 | 0.001767 | 0.017953 | 0.254329 | -0.227762 | -0.614790 | 0 | 0 |
| 22 | 1.173352 | 0.490500 | 0.742825 | -0.028159 | -0.272396 | -0.502733 | -0.759443 | -1.031924 | -0.157975 | 0.075659 | 0.604220 | 0.143298 | -0.001849 | 0 | 0 |
| 23 | 1.069960 | 0.858822 | -0.795544 | 0.076688 | 0.851875 | 0.735014 | -0.758779 | 0.065595 | 0.532667 | -0.391858 | -0.497019 | 0.240822 | 0.848126 | 4 | 0 |
| 24 | 0.581377 | -0.804045 | 0.399887 | 1.535671 | 0.245878 | 0.904192 | -0.233991 | -0.925983 | 0.212280 | 0.499535 | -0.024926 | -0.925999 | 1.294925 | 4 | 0 |
| 25 | 0.161110 | 0.025075 | 0.716318 | 1.532230 | 0.889883 | 0.353167 | -0.058787 | -0.593046 | 0.093773 | 0.927085 | 0.199691 | -0.979872 | 0.232850 | 4 | 0 |
| 26 | 0.431443 | 0.442713 | 0.259120 | 0.045533 | 0.102675 | 0.367606 | 0.054320 | 0.942924 | 0.180609 | 0.550983 | 0.265291 | 0.321252 | -0.830969 | 4 | 0 |
| 27 | 0.344525 | -1.140315 | -0.725453 | -0.547965 | 0.449924 | 0.303904 | 1.053624 | 1.051712 | 0.509322 | 0.181611 | -0.519979 | -1.134490 | -1.439105 | 3 | 0 |
| 28 | -0.041565 | 0.671274 | 0.195143 | 0.247294 | 0.531620 | 1.050124 | 0.311358 | 0.988161 | -0.198869 | 0.387795 | 1.757366 | 1.351684 | 0.194840 | 4 | 0 |
| 29 | 0.417845 | -1.134173 | -0.760709 | -0.605264 | 0.077464 | 0.533333 | 1.104524 | 2.124971 | 0.083548 | 0.801730 | 0.092534 | -1.281628 | -1.468782 | 3 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 225 | 1.532114 | -1.060006 | -0.434145 | -0.999435 | -1.259462 | 0.039140 | -0.802013 | -0.655286 | 0.714448 | 1.005958 | -0.086372 | 0.537392 | 0.054440 | 1 | 1 |
| 226 | -0.942320 | 1.172080 | 0.506725 | -0.230675 | -0.104635 | 0.898742 | -1.107001 | -1.182148 | -0.940991 | 0.232366 | 1.778224 | 0.975251 | 1.731084 | 0 | 1 |
| 227 | 1.421974 | 0.631029 | -0.563813 | -0.694595 | -0.673270 | 0.929022 | 0.476907 | -1.025173 | -0.813644 | -0.060006 | -0.738730 | -0.558099 | 0.057654 | 1 | 1 |
| 228 | -1.473385 | -0.806223 | 1.849423 | -1.252541 | 0.941013 | -0.872947 | -1.812392 | -0.242718 | -0.097212 | -0.510500 | -0.232195 | -0.546399 | 0.945530 | 2 | 1 |
| 229 | -1.135926 | -0.772372 | 1.164844 | -1.022517 | 0.630202 | -0.496999 | -1.101656 | -0.168921 | -0.295159 | -0.587401 | 0.369033 | -0.266325 | 0.604469 | 2 | 1 |
| 230 | -1.085049 | 0.879566 | 0.442593 | 0.128917 | 0.393498 | 0.531555 | 0.392194 | 1.418515 | 0.891015 | -0.348926 | -0.756201 | -0.838584 | -0.015971 | 3 | 1 |
| 231 | -0.352258 | 0.556982 | 0.530520 | 0.443818 | 0.300921 | 0.032128 | -0.797384 | -0.573532 | 0.398084 | 0.328875 | -0.274964 | -1.300920 | 0.254456 | 0 | 1 |
| 232 | -1.190363 | 0.797356 | 0.758472 | 0.587917 | 0.890540 | 0.471925 | 0.105793 | 0.680721 | 0.230834 | -0.150709 | -0.816744 | -0.470618 | 0.371198 | 4 | 1 |
| 233 | -0.651003 | -0.586618 | 1.326854 | -0.451354 | 0.507113 | 0.165474 | -0.919675 | -0.448249 | -1.310940 | -1.372737 | 0.406029 | -1.414627 | -0.434858 | 2 | 1 |
| 234 | -1.459511 | -0.516281 | 1.631699 | -1.141842 | 0.584621 | -0.458541 | -1.428877 | -0.934556 | -0.216455 | -0.049794 | 0.095580 | 0.387068 | 0.693730 | 2 | 1 |
| 235 | -0.726984 | 0.702447 | 0.798069 | -0.320660 | 0.530902 | 1.019988 | 0.144995 | 0.207847 | 0.039592 | 0.220761 | 0.762941 | 0.575034 | 0.671517 | 4 | 1 |
| 236 | -0.300986 | -0.404923 | 0.715406 | 0.245380 | -0.427936 | -0.334843 | -0.228084 | -0.330898 | -0.674327 | 0.199560 | 0.827455 | 0.016433 | 0.866789 | 0 | 1 |
| 237 | -0.736244 | 0.088611 | 0.910051 | 0.437100 | 0.258256 | 0.363828 | -0.415290 | -0.717445 | -0.012727 | 0.436925 | -0.786954 | -1.217376 | 0.352825 | 2 | 1 |
| 238 | 0.610473 | -2.664315 | 1.303652 | -2.022376 | 1.500032 | -1.280926 | -1.249533 | 0.432111 | -0.768558 | 0.291156 | -0.092312 | 0.053770 | -0.401166 | 2 | 1 |
| 239 | -2.045424 | -2.954642 | 0.302601 | -0.868092 | -1.038134 | -1.230777 | 0.514329 | 0.057591 | -1.023895 | 0.275395 | -1.450282 | 0.386242 | 0.318763 | 2 | 1 |
| 240 | 0.329793 | -1.367570 | -1.454329 | -0.207924 | -0.723609 | -0.149025 | -0.085298 | -0.011595 | -0.240239 | -0.009120 | -0.325229 | -0.025722 | 0.114182 | 1 | 1 |
| 241 | -1.919591 | 1.382172 | -0.134161 | 0.837967 | -0.687780 | 0.944303 | -0.258652 | -0.742178 | 0.386031 | -1.178099 | -1.843543 | -0.710556 | -0.318561 | 3 | 1 |
| 242 | -2.087669 | 1.400006 | -0.494964 | 0.451717 | -0.759188 | 0.736625 | 0.133121 | -0.196031 | 1.121231 | 0.474128 | -0.345937 | -0.409324 | -0.442069 | 4 | 1 |
| 243 | -2.131652 | 0.439305 | -0.612226 | 0.854126 | -0.494550 | 0.825299 | 0.301373 | -0.018964 | 0.690556 | -0.078762 | -0.709495 | -0.075857 | -0.418656 | 4 | 1 |
| 244 | -1.611989 | -0.756403 | -0.410917 | 1.075909 | 0.297336 | -1.317576 | 1.115011 | -0.467065 | -0.768378 | 1.615499 | 1.611125 | -1.018782 | -1.798744 | 2 | 1 |
| 245 | -0.142010 | 0.000190 | -0.063461 | -0.506353 | -0.386942 | -0.256144 | 0.270621 | -1.497417 | 0.507892 | 0.456828 | -0.431169 | -0.978417 | 0.015849 | 2 | 1 |
| 246 | -1.263975 | -1.168117 | -1.396090 | -0.312016 | 1.862268 | 1.400290 | 0.646060 | -0.686864 | 0.418524 | -0.069926 | -0.653856 | -0.853617 | -0.106814 | 2 | 1 |
| 247 | -0.507700 | 0.899825 | 1.510153 | 1.083642 | 2.081451 | 0.589016 | 0.901321 | 0.658808 | 0.152596 | 0.176442 | -0.447633 | 0.287838 | 0.650479 | 4 | 1 |
| 248 | -0.159768 | 0.518093 | 2.197018 | 0.698491 | 0.476336 | -2.014255 | -1.614667 | -0.397282 | -1.781932 | -0.208894 | 1.650551 | -0.771436 | -0.987237 | 0 | 1 |
| 249 | -1.037899 | 1.016712 | 2.774230 | 0.665468 | -0.385673 | 0.587263 | -0.121609 | -0.331379 | 0.622484 | -0.387131 | -0.276584 | 0.218207 | 1.689216 | 0 | 1 |
| 250 | -0.526923 | -1.169944 | 0.474875 | -0.789231 | 0.369827 | -0.537003 | -1.089843 | -0.173366 | -0.023237 | -0.142334 | 0.740065 | 0.813114 | 0.872556 | 2 | 1 |
| 251 | -0.770856 | -1.024349 | -0.019140 | -0.097521 | 0.092703 | 0.369242 | -0.273901 | 0.190740 | -0.074032 | 0.113055 | 0.140291 | -0.696275 | 0.166679 | 2 | 1 |
| 252 | -0.905458 | -0.790575 | 0.206164 | -0.723816 | -0.444860 | 0.107833 | -0.734514 | -0.533865 | -0.634334 | 0.320526 | 0.088428 | -0.348210 | 0.347201 | 2 | 1 |
| 253 | -1.378235 | -0.338405 | 0.016815 | -0.394563 | 0.034043 | 1.023865 | -0.303960 | -1.316121 | 0.198697 | 0.670577 | 0.809574 | 0.580565 | 0.056004 | 2 | 1 |
| 254 | -0.199959 | -2.035812 | -0.904507 | -1.511975 | -0.437843 | 0.262972 | -1.943788 | -1.963300 | -2.256227 | 0.354369 | -0.039829 | 0.882325 | 0.139307 | 2 | 1 |
255 rows × 15 columns
# Count rows per (chosen, cluster) pair and plot the counts per cluster
# as stacked bars (columns 0/1 = not-chosen / chosen).
counts = X.groupby(['chosen','Cluster']).size()
stacked = counts.reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:,[0,1]].plot.bar(stacked=True, figsize=(10,7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82aec1940>
from IPython.display import display, Markdown, Latex

# Render a markdown heading for the third company's section.
display(Markdown('## ' + companies[2]))

# Features: standardized MFCC frame for company 2, minus the cluster label.
# Target: the 'chosen' flag from the corresponding playlist frame.
X = df_n_ps_std_mfcc[2].drop(columns='Cluster')
y = df_n_ps[2]['chosen']

# Default 75/25 train/test split.
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(231, 13)
# Hyper-parameter search for an MLP classifier over the MFCC features.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))

# Candidate values for each hyper-parameter explored by the grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
                          (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]

import time
start = time.time()  # seconds since the epoch (1970-01-01), used to time the search
np.random.seed(1234)

parametros = {'activation': activation_vec,
              'max_iter': max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
             }
# Track Cohen's kappa alongside accuracy; refit the best model by accuracy.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
# FIX: dropped `iid=True` — the parameter was deprecated in scikit-learn 0.22
# and removed in 0.24, so passing it raises TypeError on current versions.
# Without it, fold scores are averaged unweighted (the modern behavior).
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring,
                    refit='accuracy', n_jobs=-1)
grid.fit(X_train, y_train)

print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))

end = time.time()  # wall-clock time after the search finishes
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30,), 'learning_rate_init': 0.001, 'max_iter': 1000}, que permiten obtener un Accuracy de 84.42% y un Kappa del 45.84
Tiempo total: 27.81 minutos
# Rebuild the grid-search winner as a Keras functional model.
n0 = X_train.shape[1]  # number of input features (13 MFCCs)

### hidden_layer_sizes
# Layer widths: the best model's hidden sizes plus the single output unit.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)

lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']

# FIX: use the activation the grid search actually selected instead of a
# hard-coded 'tanh' (best_params_ reported 'relu'). Map scikit-learn's
# activation names to their Keras equivalents ('logistic' -> 'sigmoid').
keras_activation = {'logistic': 'sigmoid', 'tanh': 'tanh', 'relu': 'relu'}[
    grid.best_params_['activation']]

input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
# Chain one Dense layer per hidden width; each consumes the previous output.
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation=keras_activation)(hidden_outputs[i]))
# Sigmoid output unit for the binary 'chosen' classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot of the initial weights for reproducible re-runs
model.summary()
Model: "model_3" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_3 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_8 (Dense) (None, 30) 420 _________________________________________________________________ dense_9 (Dense) (None, 1) 31 ================================================================= Total params: 451 Trainable params: 451 Non-trainable params: 0 _________________________________________________________________
# Restore the snapshotted initial weights so re-running this cell trains
# from the same starting point.
model.set_weights(weights)
# Adam with the learning rate chosen by the grid search.
# NOTE(review): `lr` is the legacy Keras keyword; newer releases expect
# `learning_rate` — confirm against the installed Keras version.
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train for the grid-search epoch count, validating on the held-out split;
# halve the learning rate whenever validation accuracy fails to improve by
# at least 0.01 over 10 epochs (see the epoch log below).
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                    )
Train on 231 samples, validate on 78 samples Epoch 1/1000 231/231 [==============================] - 0s 1ms/step - loss: 0.8119 - accuracy: 0.4589 - val_loss: 0.7457 - val_accuracy: 0.5385 Epoch 2/1000 231/231 [==============================] - 0s 126us/step - loss: 0.7766 - accuracy: 0.4935 - val_loss: 0.7248 - val_accuracy: 0.5513 Epoch 3/1000 231/231 [==============================] - 0s 61us/step - loss: 0.7467 - accuracy: 0.5411 - val_loss: 0.7062 - val_accuracy: 0.5385 Epoch 4/1000 231/231 [==============================] - 0s 52us/step - loss: 0.7166 - accuracy: 0.5931 - val_loss: 0.6883 - val_accuracy: 0.5513 Epoch 5/1000 231/231 [==============================] - 0s 56us/step - loss: 0.6911 - accuracy: 0.6147 - val_loss: 0.6739 - val_accuracy: 0.5641 Epoch 6/1000 231/231 [==============================] - 0s 56us/step - loss: 0.6673 - accuracy: 0.6234 - val_loss: 0.6607 - val_accuracy: 0.5897 Epoch 7/1000 231/231 [==============================] - 0s 78us/step - loss: 0.6460 - accuracy: 0.6364 - val_loss: 0.6480 - val_accuracy: 0.5897 Epoch 8/1000 231/231 [==============================] - 0s 56us/step - loss: 0.6258 - accuracy: 0.6623 - val_loss: 0.6371 - val_accuracy: 0.6154 Epoch 9/1000 231/231 [==============================] - 0s 74us/step - loss: 0.6079 - accuracy: 0.6883 - val_loss: 0.6269 - val_accuracy: 0.6667 Epoch 10/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5915 - accuracy: 0.7229 - val_loss: 0.6189 - val_accuracy: 0.6923 Epoch 11/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5768 - accuracy: 0.7403 - val_loss: 0.6096 - val_accuracy: 0.7051 Epoch 12/1000 231/231 [==============================] - 0s 56us/step - loss: 0.5636 - accuracy: 0.7532 - val_loss: 0.6025 - val_accuracy: 0.7308 Epoch 13/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5516 - accuracy: 0.7662 - val_loss: 0.5950 - val_accuracy: 0.7564 Epoch 14/1000 231/231 [==============================] - 0s 
56us/step - loss: 0.5405 - accuracy: 0.7619 - val_loss: 0.5887 - val_accuracy: 0.7692 Epoch 15/1000 231/231 [==============================] - 0s 61us/step - loss: 0.5306 - accuracy: 0.7706 - val_loss: 0.5840 - val_accuracy: 0.7564 Epoch 16/1000 231/231 [==============================] - 0s 91us/step - loss: 0.5200 - accuracy: 0.7835 - val_loss: 0.5799 - val_accuracy: 0.7564 Epoch 17/1000 231/231 [==============================] - 0s 65us/step - loss: 0.5108 - accuracy: 0.7879 - val_loss: 0.5743 - val_accuracy: 0.7436 Epoch 18/1000 231/231 [==============================] - 0s 91us/step - loss: 0.5024 - accuracy: 0.7965 - val_loss: 0.5700 - val_accuracy: 0.7179 Epoch 19/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4944 - accuracy: 0.8139 - val_loss: 0.5660 - val_accuracy: 0.7051 Epoch 20/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4864 - accuracy: 0.8139 - val_loss: 0.5606 - val_accuracy: 0.7179 Epoch 21/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4797 - accuracy: 0.8139 - val_loss: 0.5552 - val_accuracy: 0.7179 Epoch 22/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4734 - accuracy: 0.8182 - val_loss: 0.5507 - val_accuracy: 0.7308 Epoch 23/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4674 - accuracy: 0.8225 - val_loss: 0.5486 - val_accuracy: 0.7308 Epoch 24/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4615 - accuracy: 0.8225 - val_loss: 0.5479 - val_accuracy: 0.7436 Epoch 00024: ReduceLROnPlateau reducing learning rate to 0.0005000000237487257. 
Epoch 25/1000 231/231 [==============================] - ETA: 0s - loss: 0.4421 - accuracy: 0.84 - 0s 56us/step - loss: 0.4574 - accuracy: 0.8182 - val_loss: 0.5472 - val_accuracy: 0.7436 Epoch 26/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4552 - accuracy: 0.8182 - val_loss: 0.5465 - val_accuracy: 0.7564 Epoch 27/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4527 - accuracy: 0.8139 - val_loss: 0.5450 - val_accuracy: 0.7564 Epoch 28/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4502 - accuracy: 0.8139 - val_loss: 0.5441 - val_accuracy: 0.7564 Epoch 29/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4479 - accuracy: 0.8182 - val_loss: 0.5429 - val_accuracy: 0.7564 Epoch 30/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4459 - accuracy: 0.8182 - val_loss: 0.5424 - val_accuracy: 0.7436 Epoch 31/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4439 - accuracy: 0.8182 - val_loss: 0.5413 - val_accuracy: 0.7436 Epoch 32/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4419 - accuracy: 0.8182 - val_loss: 0.5407 - val_accuracy: 0.7436 Epoch 33/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4402 - accuracy: 0.8139 - val_loss: 0.5400 - val_accuracy: 0.7436 Epoch 34/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4385 - accuracy: 0.8139 - val_loss: 0.5398 - val_accuracy: 0.7436 Epoch 00034: ReduceLROnPlateau reducing learning rate to 0.0002500000118743628. 
Epoch 35/1000 231/231 [==============================] - 0s 52us/step - loss: 0.4370 - accuracy: 0.8139 - val_loss: 0.5392 - val_accuracy: 0.7436 Epoch 36/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4361 - accuracy: 0.8182 - val_loss: 0.5391 - val_accuracy: 0.7436 Epoch 37/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4353 - accuracy: 0.8182 - val_loss: 0.5390 - val_accuracy: 0.7436 Epoch 38/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4346 - accuracy: 0.8182 - val_loss: 0.5384 - val_accuracy: 0.7436 Epoch 39/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4337 - accuracy: 0.8182 - val_loss: 0.5387 - val_accuracy: 0.7436 Epoch 40/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4329 - accuracy: 0.8182 - val_loss: 0.5382 - val_accuracy: 0.7436 Epoch 41/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4320 - accuracy: 0.8182 - val_loss: 0.5377 - val_accuracy: 0.7436 Epoch 42/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4311 - accuracy: 0.8139 - val_loss: 0.5370 - val_accuracy: 0.7436 Epoch 43/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4304 - accuracy: 0.8095 - val_loss: 0.5363 - val_accuracy: 0.7436 Epoch 44/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4298 - accuracy: 0.8095 - val_loss: 0.5358 - val_accuracy: 0.7436 Epoch 00044: ReduceLROnPlateau reducing learning rate to 0.0001250000059371814. 
Epoch 45/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4291 - accuracy: 0.8139 - val_loss: 0.5355 - val_accuracy: 0.7436 Epoch 46/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4286 - accuracy: 0.8139 - val_loss: 0.5353 - val_accuracy: 0.7564 Epoch 47/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4282 - accuracy: 0.8182 - val_loss: 0.5353 - val_accuracy: 0.7436 Epoch 48/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4279 - accuracy: 0.8182 - val_loss: 0.5352 - val_accuracy: 0.7564 Epoch 49/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4275 - accuracy: 0.8182 - val_loss: 0.5350 - val_accuracy: 0.7436 Epoch 50/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4271 - accuracy: 0.8182 - val_loss: 0.5351 - val_accuracy: 0.7564 Epoch 51/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4267 - accuracy: 0.8182 - val_loss: 0.5350 - val_accuracy: 0.7436 Epoch 52/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4265 - accuracy: 0.8182 - val_loss: 0.5350 - val_accuracy: 0.7436 Epoch 53/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4261 - accuracy: 0.8182 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 54/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4258 - accuracy: 0.8182 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 00054: ReduceLROnPlateau reducing learning rate to 6.25000029685907e-05. 
Epoch 55/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4255 - accuracy: 0.8182 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 56/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4253 - accuracy: 0.8182 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 57/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4251 - accuracy: 0.8182 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 58/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4250 - accuracy: 0.8182 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 59/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4248 - accuracy: 0.8182 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 60/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4246 - accuracy: 0.8182 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 61/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4244 - accuracy: 0.8182 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 62/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4243 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 63/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4241 - accuracy: 0.8139 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 64/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4240 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 00064: ReduceLROnPlateau reducing learning rate to 3.125000148429535e-05. 
Epoch 65/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4238 - accuracy: 0.8182 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 66/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4237 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 67/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4236 - accuracy: 0.8139 - val_loss: 0.5348 - val_accuracy: 0.7436 Epoch 68/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4235 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 69/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4235 - accuracy: 0.8139 - val_loss: 0.5347 - val_accuracy: 0.7436 Epoch 70/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4234 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 71/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4233 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 72/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4232 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 73/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4232 - accuracy: 0.8139 - val_loss: 0.5346 - val_accuracy: 0.7436 Epoch 74/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4231 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 00074: ReduceLROnPlateau reducing learning rate to 1.5625000742147677e-05. 
Epoch 75/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4230 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 76/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4230 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 77/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4229 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 78/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4229 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 79/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4228 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 80/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4228 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 81/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4227 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 82/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4227 - accuracy: 0.8139 - val_loss: 0.5345 - val_accuracy: 0.7436 Epoch 83/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4227 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 84/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4226 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 00084: ReduceLROnPlateau reducing learning rate to 7.812500371073838e-06. 
Epoch 85/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4226 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 86/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 87/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 88/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 89/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5344 - val_accuracy: 0.7436 Epoch 90/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4225 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 91/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 92/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 93/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 94/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 00094: ReduceLROnPlateau reducing learning rate to 3.906250185536919e-06. 
Epoch 95/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4224 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 96/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 97/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 98/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 99/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 100/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 101/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 102/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 103/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 104/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 00104: ReduceLROnPlateau reducing learning rate to 1.9531250927684596e-06. 
Epoch 105/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4223 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 106/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 107/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 108/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 109/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 110/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 111/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 112/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 113/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5343 - val_accuracy: 0.7436 Epoch 114/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00114: ReduceLROnPlateau reducing learning rate to 9.765625463842298e-07. 
Epoch 115/1000 231/231 [==============================] - 0s 130us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 116/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 117/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 118/1000 231/231 [==============================] - 0s 61us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 119/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 120/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 121/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 122/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 123/1000 231/231 [==============================] - 0s 65us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 124/1000 231/231 [==============================] - 0s 56us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00124: ReduceLROnPlateau reducing learning rate to 4.882812731921149e-07. 
Epoch 125/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 126/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 127/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 128/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 129/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 130/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 131/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 132/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 133/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 134/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00134: ReduceLROnPlateau reducing learning rate to 2.4414063659605745e-07. 
Epoch 135/1000 231/231 [==============================] - 0s 134us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 136/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 137/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 138/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 139/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 140/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 141/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 142/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 143/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 144/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00144: ReduceLROnPlateau reducing learning rate to 1.2207031829802872e-07. 
Epoch 145/1000 231/231 [==============================] - 0s 1ms/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 146/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 147/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 148/1000 231/231 [==============================] - 0s 114us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 149/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 150/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 151/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 152/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 153/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4222 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 154/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00154: ReduceLROnPlateau reducing learning rate to 6.103515914901436e-08. 
Epoch 155/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 156/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 157/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 158/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 159/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 160/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 161/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 162/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 163/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 164/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00164: ReduceLROnPlateau reducing learning rate to 3.051757957450718e-08. 
Epoch 165/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 166/1000 231/231 [==============================] - 0s 143us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 167/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 168/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 169/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 170/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 171/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 172/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 173/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 174/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00174: ReduceLROnPlateau reducing learning rate to 1.525878978725359e-08. 
Epoch 175/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 176/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 177/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 178/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 179/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 180/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 181/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 182/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 183/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 184/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00184: ReduceLROnPlateau reducing learning rate to 7.629394893626795e-09. 
Epoch 185/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 186/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 187/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 188/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 189/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 190/1000 231/231 [==============================] - 0s 125us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 191/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 192/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 193/1000 231/231 [==============================] - ETA: 0s - loss: 0.3107 - accuracy: 0.90 - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 194/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00194: ReduceLROnPlateau reducing learning rate to 3.814697446813398e-09. 
Epoch 195/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 196/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 197/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 198/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 199/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 200/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 201/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 202/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 203/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 204/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00204: ReduceLROnPlateau reducing learning rate to 1.907348723406699e-09. 
Epoch 205/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 206/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 207/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 208/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 209/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 210/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 211/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 212/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 213/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 214/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00214: ReduceLROnPlateau reducing learning rate to 9.536743617033494e-10. 
Epoch 215/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 216/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 217/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 218/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 219/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 220/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 221/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 222/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 223/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 224/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00224: ReduceLROnPlateau reducing learning rate to 4.768371808516747e-10. 
Epoch 225/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 226/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 227/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 228/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 229/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 230/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 231/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 232/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 233/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 234/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00234: ReduceLROnPlateau reducing learning rate to 2.3841859042583735e-10. 
Epoch 235/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 236/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 237/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 238/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 239/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 240/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 241/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 242/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 243/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 244/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00244: ReduceLROnPlateau reducing learning rate to 1.1920929521291868e-10. 
Epoch 245/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 246/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 247/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 248/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 249/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 250/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 251/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 252/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 253/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 254/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00254: ReduceLROnPlateau reducing learning rate to 5.960464760645934e-11. 
Epoch 255/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 256/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 257/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 258/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 259/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 260/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 261/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 262/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 263/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 264/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00264: ReduceLROnPlateau reducing learning rate to 2.980232380322967e-11. 
Epoch 265/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 266/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 267/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 268/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 269/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 270/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 271/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 272/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 273/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 274/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00274: ReduceLROnPlateau reducing learning rate to 1.4901161901614834e-11. 
Epoch 275/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 276/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 277/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 278/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 279/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 280/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 281/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 282/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 283/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 284/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00284: ReduceLROnPlateau reducing learning rate to 7.450580950807417e-12. 
Epoch 285/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 286/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 287/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 288/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 289/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 290/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 291/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 292/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 293/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 294/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00294: ReduceLROnPlateau reducing learning rate to 3.725290475403709e-12. 
Epoch 295/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 296/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 297/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 298/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 299/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 300/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 301/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 302/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 303/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 304/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00304: ReduceLROnPlateau reducing learning rate to 1.8626452377018543e-12. 
Epoch 305/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 306/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 307/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 308/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 309/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 310/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 311/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 312/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 313/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 314/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00314: ReduceLROnPlateau reducing learning rate to 9.313226188509272e-13. 
Epoch 315/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 316/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 317/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 318/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 319/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 320/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 321/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 322/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 323/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 324/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00324: ReduceLROnPlateau reducing learning rate to 4.656613094254636e-13. 
Epoch 325/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 326/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 327/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 328/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 329/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 330/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 331/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 332/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 333/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 334/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00334: ReduceLROnPlateau reducing learning rate to 2.328306547127318e-13. 
Epoch 335/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 336/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 337/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 338/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 339/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 340/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 341/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 342/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 343/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 344/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00344: ReduceLROnPlateau reducing learning rate to 1.164153273563659e-13. 
Epoch 345/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 346/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 347/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 348/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 349/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 350/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 351/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 352/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 353/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 354/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00354: ReduceLROnPlateau reducing learning rate to 5.820766367818295e-14. 
Epoch 355/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 356/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 357/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 358/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 359/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 360/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 361/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 362/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 363/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 364/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00364: ReduceLROnPlateau reducing learning rate to 2.9103831839091474e-14. 
Epoch 365/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 366/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 367/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 368/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 369/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 370/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 371/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 372/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 373/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 374/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00374: ReduceLROnPlateau reducing learning rate to 1.4551915919545737e-14. 
Epoch 375/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 376/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 377/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 378/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 379/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 380/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 381/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 382/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 383/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 384/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00384: ReduceLROnPlateau reducing learning rate to 7.275957959772868e-15. 
Epoch 385/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 386/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 387/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 388/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 389/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 390/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 391/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 392/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 393/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 394/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00394: ReduceLROnPlateau reducing learning rate to 3.637978979886434e-15. 
Epoch 395/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 396/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 397/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 398/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 399/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 400/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 401/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 402/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 403/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 404/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00404: ReduceLROnPlateau reducing learning rate to 1.818989489943217e-15. 
Epoch 405/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 406/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 407/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 408/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 409/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 410/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 411/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 412/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 413/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 414/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00414: ReduceLROnPlateau reducing learning rate to 9.094947449716085e-16. 
Epoch 415/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 416/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 417/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 418/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 419/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 420/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 421/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 422/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 423/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 424/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00424: ReduceLROnPlateau reducing learning rate to 4.547473724858043e-16. 
Epoch 425/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 426/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 427/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 428/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 429/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 430/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 431/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 432/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 433/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 434/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00434: ReduceLROnPlateau reducing learning rate to 2.2737368624290214e-16. 
Epoch 435/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 436/1000 231/231 [==============================] - ETA: 0s - loss: 0.6507 - accuracy: 0.62 - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 437/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 438/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 439/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 440/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 441/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 442/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 443/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 444/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00444: ReduceLROnPlateau reducing learning rate to 1.1368684312145107e-16. 
Epoch 445/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 446/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 447/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 448/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 449/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 450/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 451/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 452/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 453/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 454/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00454: ReduceLROnPlateau reducing learning rate to 5.684342156072553e-17. 
Epoch 455/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 456/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 457/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 458/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 459/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 460/1000 231/231 [==============================] - 0s 203us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 461/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 462/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 463/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 464/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00464: ReduceLROnPlateau reducing learning rate to 2.842171078036277e-17. 
Epoch 465/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 466/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 467/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 468/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 469/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 470/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 471/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 472/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 473/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 474/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00474: ReduceLROnPlateau reducing learning rate to 1.4210855390181384e-17. 
Epoch 475/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 476/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 477/1000 231/231 [==============================] - 0s 69us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 478/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 479/1000 231/231 [==============================] - 0s 125us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 480/1000 231/231 [==============================] - ETA: 0s - loss: 0.4785 - accuracy: 0.78 - 0s 143us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 481/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 482/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 483/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 484/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00484: ReduceLROnPlateau reducing learning rate to 7.105427695090692e-18. 
Epoch 485/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 486/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 487/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 488/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 489/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 490/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 491/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 492/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 493/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 494/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00494: ReduceLROnPlateau reducing learning rate to 3.552713847545346e-18. 
Epoch 495/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 496/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 497/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 498/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 499/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 500/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 501/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 502/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 503/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 504/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00504: ReduceLROnPlateau reducing learning rate to 1.776356923772673e-18. 
Epoch 505/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 506/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 507/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 508/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 509/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 510/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 511/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 512/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 513/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 514/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00514: ReduceLROnPlateau reducing learning rate to 8.881784618863365e-19. 
Epoch 515/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 516/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 517/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 518/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 519/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 520/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 521/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 522/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 523/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 524/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00524: ReduceLROnPlateau reducing learning rate to 4.440892309431682e-19. 
Epoch 525/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 526/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 527/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 528/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 529/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 530/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 531/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 532/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 533/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 534/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00534: ReduceLROnPlateau reducing learning rate to 2.220446154715841e-19. 
Epoch 535/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 536/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 537/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 538/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 539/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 540/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 541/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 542/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 543/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 544/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00544: ReduceLROnPlateau reducing learning rate to 1.1102230773579206e-19. 
Epoch 545/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 546/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 547/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 548/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 549/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 550/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 551/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 552/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 553/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 554/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00554: ReduceLROnPlateau reducing learning rate to 5.551115386789603e-20. 
Epoch 555/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 556/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 557/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 558/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 559/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 560/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 561/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 562/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 563/1000 231/231 [==============================] - 0s 125us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 564/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00564: ReduceLROnPlateau reducing learning rate to 2.7755576933948015e-20. 
Epoch 565/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 566/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 567/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 568/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 569/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 570/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 571/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 572/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 573/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 574/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00574: ReduceLROnPlateau reducing learning rate to 1.3877788466974007e-20. 
Epoch 575/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 576/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 577/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 578/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 579/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 580/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 581/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 582/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 583/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 584/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00584: ReduceLROnPlateau reducing learning rate to 6.938894233487004e-21. 
Epoch 585/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 586/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 587/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 588/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 589/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 590/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 591/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 592/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 593/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 594/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00594: ReduceLROnPlateau reducing learning rate to 3.469447116743502e-21. 
Epoch 595/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 596/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 597/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 598/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 599/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 600/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 601/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 602/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 603/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 604/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00604: ReduceLROnPlateau reducing learning rate to 1.734723558371751e-21. 
Epoch 605/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 606/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 607/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 608/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 609/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 610/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 611/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 612/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 613/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 614/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00614: ReduceLROnPlateau reducing learning rate to 8.673617791858755e-22. 
Epoch 615/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 616/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 617/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 618/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 619/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 620/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 621/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 622/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 623/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 624/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00624: ReduceLROnPlateau reducing learning rate to 4.336808895929377e-22. 
Epoch 625/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 626/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 627/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 628/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 629/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 630/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 631/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 632/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 633/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 634/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00634: ReduceLROnPlateau reducing learning rate to 2.1684044479646887e-22. 
Epoch 635/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 636/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 637/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 638/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 639/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 640/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 641/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 642/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 643/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 644/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00644: ReduceLROnPlateau reducing learning rate to 1.0842022239823443e-22. 
Epoch 645/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 646/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 647/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 648/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 649/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 650/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 651/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 652/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 653/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 654/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00654: ReduceLROnPlateau reducing learning rate to 5.421011119911722e-23. 
Epoch 655/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 656/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 657/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 658/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 659/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 660/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 661/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 662/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 663/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 664/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00664: ReduceLROnPlateau reducing learning rate to 2.710505559955861e-23. 
Epoch 665/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 666/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 667/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 668/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 669/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 670/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 671/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 672/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 673/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 674/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00674: ReduceLROnPlateau reducing learning rate to 1.3552527799779304e-23. 
Epoch 675/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 676/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 677/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 678/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 679/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 680/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 681/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 682/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 683/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 684/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00684: ReduceLROnPlateau reducing learning rate to 6.776263899889652e-24. 
Epoch 685/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 686/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 687/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 688/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 689/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 690/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 691/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 692/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 693/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 694/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00694: ReduceLROnPlateau reducing learning rate to 3.388131949944826e-24. 
Epoch 695/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 696/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 697/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 698/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 699/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 700/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 701/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 702/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 703/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 704/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00704: ReduceLROnPlateau reducing learning rate to 1.694065974972413e-24. 
Epoch 705/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 706/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 707/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 708/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 709/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 710/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 711/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 712/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 713/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 714/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00714: ReduceLROnPlateau reducing learning rate to 8.470329874862065e-25. 
Epoch 715/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 716/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 717/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 718/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 719/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 720/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 721/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 722/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 723/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 724/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00724: ReduceLROnPlateau reducing learning rate to 4.2351649374310325e-25. 
Epoch 725/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 726/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 727/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 728/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 729/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 730/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 731/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 732/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 733/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 734/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00734: ReduceLROnPlateau reducing learning rate to 2.1175824687155163e-25. 
Epoch 735/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 736/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 737/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 738/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 739/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 740/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 741/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 742/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 743/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 744/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00744: ReduceLROnPlateau reducing learning rate to 1.0587912343577581e-25. 
Epoch 745/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 746/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 747/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 748/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 749/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 750/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 751/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 752/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 753/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 754/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00754: ReduceLROnPlateau reducing learning rate to 5.293956171788791e-26. 
Epoch 755/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 756/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 757/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 758/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 759/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 760/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 761/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 762/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 763/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 764/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00764: ReduceLROnPlateau reducing learning rate to 2.6469780858943953e-26. 
Epoch 765/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 766/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 767/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 768/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 769/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 770/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 771/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 772/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 773/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 774/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00774: ReduceLROnPlateau reducing learning rate to 1.3234890429471977e-26. 
Epoch 775/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 776/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 777/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 778/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 779/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 780/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 781/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 782/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 783/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 784/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00784: ReduceLROnPlateau reducing learning rate to 6.617445214735988e-27. 
Epoch 785/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 786/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 787/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 788/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 789/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 790/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 791/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 792/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 793/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 794/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00794: ReduceLROnPlateau reducing learning rate to 3.308722607367994e-27. 
Epoch 795/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 796/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 797/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 798/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 799/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 800/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 801/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 802/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 803/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 804/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00804: ReduceLROnPlateau reducing learning rate to 1.654361303683997e-27. 
Epoch 805/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 806/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 807/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 808/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 809/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 810/1000 231/231 [==============================] - 0s 130us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 811/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 812/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 813/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 814/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00814: ReduceLROnPlateau reducing learning rate to 8.271806518419985e-28. 
Epoch 815/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 816/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 817/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 818/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 819/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 820/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 821/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 822/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 823/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 824/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00824: ReduceLROnPlateau reducing learning rate to 4.135903259209993e-28. 
Epoch 825/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 826/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 827/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 828/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 829/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 830/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 831/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 832/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 833/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 834/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00834: ReduceLROnPlateau reducing learning rate to 2.0679516296049964e-28. 
Epoch 835/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 836/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 837/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 838/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 839/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 840/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 841/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 842/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 843/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 844/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00844: ReduceLROnPlateau reducing learning rate to 1.0339758148024982e-28. 
Epoch 845/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 846/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 847/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 848/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 849/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 850/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 851/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 852/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 853/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 854/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00854: ReduceLROnPlateau reducing learning rate to 5.169879074012491e-29. 
Epoch 855/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 856/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 857/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 858/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 859/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 860/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 861/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 862/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 863/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 864/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00864: ReduceLROnPlateau reducing learning rate to 2.5849395370062454e-29. 
Epoch 865/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 866/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 867/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 868/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 869/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 870/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 871/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 872/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 873/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 874/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00874: ReduceLROnPlateau reducing learning rate to 1.2924697685031227e-29. 
Epoch 875/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 876/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 877/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 878/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 879/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 880/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 881/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 882/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 883/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 884/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00884: ReduceLROnPlateau reducing learning rate to 6.462348842515614e-30. 
Epoch 885/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 886/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 887/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 888/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 889/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 890/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 891/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 892/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 893/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 894/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00894: ReduceLROnPlateau reducing learning rate to 3.231174421257807e-30. 
Epoch 895/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 896/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 897/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 898/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 899/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 900/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 901/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 902/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 903/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 904/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00904: ReduceLROnPlateau reducing learning rate to 1.6155872106289034e-30. 
Epoch 905/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 906/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 907/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 908/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 909/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 910/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 911/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 912/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 913/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 914/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00914: ReduceLROnPlateau reducing learning rate to 8.077936053144517e-31. 
Epoch 915/1000 231/231 [==============================] - 0s 100us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 916/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 917/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 918/1000 231/231 [==============================] - 0s 130us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 919/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 920/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 921/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 922/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 923/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 924/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00924: ReduceLROnPlateau reducing learning rate to 4.0389680265722585e-31. 
Epoch 925/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 926/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 927/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 928/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 929/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 930/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 931/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 932/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 933/1000 231/231 [==============================] - 0s 121us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 934/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00934: ReduceLROnPlateau reducing learning rate to 2.0194840132861292e-31. 
Epoch 935/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 936/1000 231/231 [==============================] - 0s 143us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 937/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 938/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 939/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 940/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 941/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 942/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 943/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 944/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00944: ReduceLROnPlateau reducing learning rate to 1.0097420066430646e-31. 
Epoch 945/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 946/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 947/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 948/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 949/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 950/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 951/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 952/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 953/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 954/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00954: ReduceLROnPlateau reducing learning rate to 5.048710033215323e-32. 
Epoch 955/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 956/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 957/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 958/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 959/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 960/1000 231/231 [==============================] - 0s 108us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 961/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 962/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 963/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 964/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00964: ReduceLROnPlateau reducing learning rate to 2.5243550166076616e-32. 
Epoch 965/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 966/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 967/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 968/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 969/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 970/1000 231/231 [==============================] - 0s 117us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 971/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 972/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 973/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 974/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00974: ReduceLROnPlateau reducing learning rate to 1.2621775083038308e-32. 
Epoch 975/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 976/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 977/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 978/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 979/1000 231/231 [==============================] - 0s 78us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 980/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 981/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 982/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 983/1000 231/231 [==============================] - 0s 112us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 984/1000 231/231 [==============================] - 0s 104us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00984: ReduceLROnPlateau reducing learning rate to 6.310887541519154e-33. 
Epoch 985/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 986/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 987/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 988/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 989/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 990/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 991/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 992/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 993/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 994/1000 231/231 [==============================] - 0s 74us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 00994: ReduceLROnPlateau reducing learning rate to 3.155443770759577e-33. 
Epoch 995/1000 231/231 [==============================] - 0s 95us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 996/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 997/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 998/1000 231/231 [==============================] - 0s 91us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 999/1000 231/231 [==============================] - 0s 87us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436 Epoch 1000/1000 231/231 [==============================] - 0s 82us/step - loss: 0.4221 - accuracy: 0.8139 - val_loss: 0.5342 - val_accuracy: 0.7436
# Plot the training history returned by model.fit(): one figure for
# accuracy, one for loss, each comparing the training and validation curves.
# Fix: dropped the stray `print(epochs)` debug line, which only emitted the
# repr of a range object (`range(0, 2000)`), and added axis labels.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))  # one x-value per recorded epoch

plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 2000)
# Score the trained model on the held-out test split and report both metrics.
test_loss, test_acc = model.evaluate(X_test, y_test)
print(f"test loss: {test_loss}, test accuracy: {test_acc}")
78/78 [==============================] - 0s 77us/step test loss: 0.5342327150014731, test accuracy: 0.7435897588729858
# AUC-ROC is computed on the raw sigmoid probabilities (before thresholding),
# since it measures ranking quality rather than hard classifications.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.655664585191793
# Binarize the predicted probabilities at the conventional 0.5 cutoff,
# then measure chance-corrected agreement with the true labels.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.07253269916765748
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.784459 | 0.109342 | 0.681608 | 1.151904 | -0.707724 | 0.736365 | 0.241404 | -0.461605 | 1.084621 | 0.123497 | -0.150398 | 1.784532 | 0.824544 |
| 1 | 0.213444 | 0.453851 | 0.215705 | 0.987439 | -1.851748 | -0.001814 | -0.218203 | 0.548263 | -0.521851 | 1.253720 | 0.882518 | -0.892913 | 0.218184 |
| 2 | 1.478029 | 0.664210 | 0.484232 | 0.450284 | -0.427587 | -0.448079 | -0.195272 | -0.610679 | -0.675633 | 0.759098 | -0.107303 | -0.440790 | -1.398093 |
| 3 | -0.846386 | -0.742706 | -0.734786 | -0.199585 | -0.328948 | -0.230911 | 0.620910 | 0.911236 | 1.274407 | 0.150882 | -0.603865 | -0.919849 | 0.386069 |
| 4 | -0.952033 | -0.794706 | -1.141199 | -0.070979 | 0.454453 | 0.544600 | 1.427005 | 1.918539 | 1.204102 | 0.076961 | -0.328712 | -1.051776 | -0.151007 |
| 5 | -1.244473 | 2.250723 | 2.321007 | 0.549219 | 1.971653 | -1.427849 | 0.392314 | -1.015093 | 0.157566 | 0.486970 | -0.455070 | 0.229936 | 0.185742 |
| 6 | -0.128652 | 0.958605 | 1.191477 | 0.705974 | 0.793937 | -0.709130 | -0.221572 | 0.922219 | 0.500475 | -0.571099 | 0.521526 | 0.567361 | -0.486761 |
| 7 | -1.148662 | 0.562286 | 0.636059 | 0.222732 | 0.741470 | 0.009108 | -0.949365 | -0.486631 | -0.519627 | -0.739600 | -0.415884 | -0.026850 | 0.290685 |
| 8 | -0.440190 | 0.461355 | -0.016542 | -0.158636 | 0.985626 | -0.417310 | 0.182134 | 0.290631 | 0.379949 | 0.069159 | 1.080013 | 0.220566 | -0.042505 |
| 9 | -0.695730 | 0.398404 | 1.069978 | -0.124019 | 0.736597 | -0.912452 | 0.673156 | 0.604840 | 0.175505 | 0.496158 | 0.541788 | 0.649837 | -0.680054 |
| 10 | -0.006662 | 0.163832 | 1.373872 | -0.095120 | 1.621755 | 1.048509 | 0.997122 | 0.721763 | 0.660834 | -1.076324 | 0.925997 | -0.147393 | -0.420465 |
| 11 | -0.771763 | -0.484525 | -0.874411 | 0.647747 | -1.241650 | 0.190918 | 0.457290 | 0.915208 | 1.999689 | 1.879761 | 0.491598 | -0.164372 | -0.560754 |
| 12 | 0.140770 | 1.869847 | -1.926303 | -2.491201 | -2.679759 | -1.527330 | -0.299345 | -0.550878 | 0.702947 | 0.143961 | 0.034796 | -0.379551 | -0.422354 |
| 13 | -1.952477 | -0.949813 | 0.063314 | 1.188657 | 1.059601 | 1.221319 | 0.070346 | 2.284107 | 2.889527 | 2.012105 | 1.053494 | -0.178905 | -2.004333 |
| 14 | -0.895529 | 0.398850 | -0.469782 | 1.216393 | 0.657294 | -0.550619 | -0.854637 | -0.815454 | 1.929689 | 1.499328 | -0.096775 | -0.174183 | -1.119396 |
| 15 | -1.161372 | 1.475106 | 1.486594 | 0.127516 | 0.213940 | 0.587080 | -0.789652 | 0.130203 | 1.199389 | 1.458358 | 0.404206 | 0.754289 | -0.784214 |
| 16 | -0.476792 | 2.179287 | 0.101035 | -1.393755 | -0.740834 | 0.589666 | 0.873850 | 0.630539 | 0.535702 | 0.387326 | -0.979677 | 0.259755 | 0.313358 |
| 17 | -0.089088 | -0.841832 | 0.674093 | -0.842623 | 0.904577 | -1.476862 | 1.853427 | -1.108621 | 0.720923 | 0.383320 | -1.842030 | 1.712321 | -1.612726 |
| 18 | -1.772732 | 0.488101 | 0.057829 | 0.041074 | 0.732429 | 1.052187 | 0.279830 | -0.350521 | -0.476338 | -0.833438 | 0.184849 | -0.055428 | 0.627307 |
| 19 | -0.640351 | 0.068493 | 0.619966 | -0.599171 | 0.860806 | -0.385120 | 1.955087 | -1.014740 | 1.224043 | 1.450896 | -2.604448 | 2.187869 | -0.464774 |
| 20 | 0.590240 | 0.699904 | -0.097902 | 0.127319 | -0.882999 | 0.319144 | -0.146142 | -0.540616 | 0.300593 | 0.688863 | 0.314647 | 0.709538 | 0.572811 |
| 21 | 0.500240 | 0.875222 | -0.833826 | 0.377484 | 0.023480 | 1.321472 | 1.094037 | 0.734507 | 0.141947 | 0.214524 | 0.508556 | -0.265911 | -0.372316 |
| 22 | -0.076653 | 0.518030 | 0.003390 | 0.452969 | -0.218736 | 0.115409 | 0.332618 | 0.611098 | 0.211893 | -0.206368 | 0.358363 | 0.614915 | 0.518172 |
| 23 | 0.010763 | -0.352873 | -0.460051 | 0.423968 | -0.228393 | -0.040296 | -0.740869 | -0.810034 | -1.379366 | -0.179024 | 0.147810 | -0.224826 | 0.615011 |
| 24 | 0.874600 | 0.173728 | -1.041125 | 0.845285 | 1.139221 | 0.264458 | -0.378878 | 0.430226 | -0.568469 | -1.237333 | 0.032074 | 0.812111 | 0.431460 |
| 25 | 0.200637 | 0.337376 | 0.022126 | 1.189135 | -0.210135 | -1.195492 | 0.067874 | 1.349711 | -0.534365 | -0.132754 | 0.055132 | 0.239009 | -0.275633 |
| 26 | 0.362627 | 0.159292 | -1.211688 | -0.555502 | 0.107540 | 0.797027 | -0.246321 | -1.113565 | -1.373054 | -2.369077 | -0.539483 | 1.032005 | 1.637730 |
| 27 | -0.504648 | -0.561515 | -2.173809 | -1.525691 | -0.810132 | -0.617474 | 0.441103 | 1.146056 | 1.464488 | -1.111032 | -0.742722 | 0.034623 | 0.200147 |
| 28 | -0.339646 | -2.140319 | -1.409226 | -0.207553 | -1.216547 | -1.135346 | -0.831817 | 1.136334 | -0.187159 | 1.388841 | 0.282573 | -0.807850 | -0.371992 |
| 29 | -1.279089 | 1.555887 | 0.890503 | 2.134195 | 0.337580 | -0.037382 | -2.046955 | -2.888113 | 1.329665 | 1.436687 | -1.576201 | 0.485256 | 1.429246 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 279 | -1.132789 | -0.931481 | -0.350024 | -0.228575 | -1.201208 | -1.044342 | 0.532403 | 1.667036 | 1.383485 | -0.967474 | -0.286625 | -1.920618 | -0.797190 |
| 280 | -0.375948 | 0.058369 | 0.489068 | 0.862825 | -1.876102 | -0.195043 | -1.163295 | 0.716190 | 0.384576 | -0.168340 | 1.542126 | -0.769460 | 0.456686 |
| 281 | 0.412883 | -1.703432 | -0.514845 | -1.382818 | -0.713972 | -0.476089 | 1.471006 | 0.826485 | 0.508608 | -1.311788 | -2.010635 | -1.122699 | -0.848851 |
| 282 | -0.152329 | -2.012108 | -0.217355 | -1.122627 | -0.851075 | 0.634424 | 1.711007 | 0.281350 | -0.565156 | -1.667195 | -1.942452 | -1.586592 | -0.485128 |
| 283 | 0.348443 | -2.381428 | 1.267515 | -1.713290 | 0.161262 | -1.589515 | 1.383857 | -0.218429 | 0.412550 | 0.382171 | -1.073499 | -1.745128 | -3.227845 |
| 284 | -0.895866 | 1.001673 | 1.059356 | 0.166883 | -0.710729 | 0.466737 | -0.857566 | -0.158962 | 0.004241 | 0.391823 | 0.576231 | 0.329506 | -1.331272 |
| 285 | 0.417102 | 1.957515 | 2.350604 | -1.125042 | -2.206390 | -0.674814 | -1.217854 | 0.372865 | 0.840465 | -0.472910 | 0.310419 | 1.379494 | 1.128412 |
| 286 | -0.900897 | -0.289100 | 0.433265 | -0.281829 | -0.379951 | 1.272236 | 0.313949 | -0.261980 | -0.053111 | 0.473694 | 0.493962 | -0.263293 | -0.657598 |
| 287 | -0.002448 | -0.853612 | 0.441903 | 0.406478 | -0.823085 | 0.590185 | -0.292046 | -0.079952 | -0.422138 | 0.579522 | -0.620415 | -0.298847 | 0.620798 |
| 288 | -0.528092 | -1.022206 | -0.348679 | 0.093718 | -1.642833 | -2.355166 | -0.992806 | -0.143423 | 0.270521 | 0.838321 | 0.843686 | 0.469574 | -0.325121 |
| 289 | -0.387248 | -1.305014 | -0.365540 | 0.202745 | -0.906016 | -1.785190 | -1.377992 | -0.544742 | -0.670979 | -0.785606 | 0.505505 | 0.502505 | -0.151297 |
| 290 | 0.425324 | -2.583173 | -2.181080 | -1.262030 | -0.179265 | 0.176164 | 1.763096 | 0.436737 | -2.048534 | -1.014266 | 1.298221 | 0.401742 | -1.080608 |
| 291 | -0.572282 | -0.375532 | -2.067885 | -0.361247 | -0.315065 | -0.671820 | -0.183865 | -0.517694 | -0.802956 | -0.951809 | 0.282442 | 0.208005 | -0.271252 |
| 292 | -0.084382 | -1.508230 | -0.105496 | -1.930204 | -1.529664 | -0.795467 | 1.273717 | -1.858542 | -0.446361 | -0.239346 | 0.154464 | -0.114937 | -1.831603 |
| 293 | -1.172703 | 0.783209 | -1.141589 | -0.982768 | -0.513216 | 0.655437 | 1.962510 | 0.628858 | 1.130028 | 1.104741 | 1.539591 | 1.547843 | -0.011302 |
| 294 | -1.293038 | 0.838303 | -1.049071 | -0.708031 | -0.779995 | 0.868108 | 1.621994 | 0.725495 | 1.173585 | 1.424395 | 1.751950 | 1.352876 | 0.339922 |
| 295 | 0.809878 | -0.351504 | -2.231752 | -0.556719 | -1.430264 | -0.357918 | -0.727837 | 1.110363 | 1.684188 | 0.429768 | 0.560061 | 0.371789 | -1.110030 |
| 296 | 0.305449 | -0.148924 | -0.727054 | -0.126830 | 0.467272 | 0.420013 | 1.212777 | 0.954055 | -0.988419 | -0.423614 | -0.047239 | 0.058678 | -0.031517 |
| 297 | 1.212224 | 1.916789 | 0.287969 | -0.073842 | 0.289112 | 0.943764 | -0.395404 | -0.380613 | 0.262567 | 0.759137 | 0.277177 | 0.493951 | 1.026995 |
| 298 | -0.121307 | 0.217217 | 0.030920 | -0.201270 | -0.752001 | -0.276070 | 0.835502 | -0.363704 | -0.641199 | 0.283313 | 0.060013 | 0.013280 | 0.477857 |
| 299 | -0.622824 | -0.595352 | 0.256282 | -0.111551 | 0.023990 | 1.221659 | 1.572998 | -0.263983 | -0.707828 | 0.707801 | 0.306249 | 1.046476 | 0.214979 |
| 300 | -0.667480 | -0.808638 | 0.730781 | 0.054549 | 0.191421 | 0.279885 | 0.088177 | 0.823617 | 0.604299 | 0.640274 | -0.360151 | 1.298688 | 0.494875 |
| 301 | 0.928382 | -2.375767 | -0.427528 | -0.852350 | -1.137004 | 1.584181 | -1.700220 | -2.060965 | -1.326622 | 0.451948 | 0.593212 | 0.152418 | -0.128797 |
| 302 | -0.483888 | 0.443846 | 0.129714 | 0.199624 | -0.106985 | 0.817702 | -0.072817 | -1.163918 | 0.545762 | -0.141320 | 0.041767 | -0.402181 | 0.061897 |
| 303 | 0.715769 | 0.780533 | 1.467750 | -0.595580 | -1.178484 | 4.014345 | -0.112339 | -1.611382 | -0.295511 | 0.032462 | 1.836607 | -4.315898 | -1.084441 |
| 304 | 0.041466 | -0.470275 | 0.234655 | 0.109532 | -0.518455 | -0.977540 | -0.613498 | -1.108545 | 0.500653 | -0.214143 | -0.033265 | -0.541673 | 0.714974 |
| 305 | 0.818747 | 0.495675 | 1.005686 | 0.967334 | 0.505171 | -0.579478 | -0.847677 | 1.574323 | 1.544556 | 0.412556 | -0.972040 | 0.290457 | 0.289042 |
| 306 | 1.062928 | -1.149587 | 1.951840 | -0.065775 | 0.546680 | 0.994901 | -1.817826 | 2.109742 | 0.264443 | 0.505287 | -0.757462 | 0.578677 | 0.222503 |
| 307 | -0.701621 | -0.049803 | -0.719153 | -0.048069 | 1.223251 | 1.913492 | 0.887449 | 0.038186 | 0.546172 | -0.568362 | -1.091833 | -0.250367 | 0.831399 |
| 308 | -0.079821 | 0.796085 | -0.215763 | -1.396439 | -0.133350 | 0.582037 | 2.442796 | 0.743250 | -1.182753 | -0.723658 | -0.879934 | -2.498899 | -1.532262 |
309 rows × 13 columns
# Elbow method: fit K-means for k = 1..14 and record the within-cluster
# sum of squares (inertia) for each k.
WSSs = []
for n_clusters in range(1, 15):
    model = KMeans(n_clusters=n_clusters, random_state=0)
    model.fit(X)
    WSSs.append(model.inertia_)
WSSs
[4016.9999999999995, 3599.8293806720085, 3349.349727264702, 3159.6428991584926, 3002.905895600155, 2909.3645052598604, 2804.695882234172, 2720.275460001156, 2622.695881163609, 2543.6022931320426, 2484.176525692807, 2436.6681239209124, 2402.363548718592, 2338.201438573343]
# Elbow curve: inertia against the number of clusters (look for the "knee").
plt.figure(figsize=(12, 12))
plt.plot(list(range(1, 15)), WSSs)
[<matplotlib.lines.Line2D at 0x1e82b4d8dd8>]
# The elbow plot suggests K = 2; fit the final clustering on the MFCC features.
K = 2
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=2, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1,
0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,
0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,
1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,
0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0])
# NOTE(review): predict(X) on the same data the model was fit on returns the
# same values as kmeans_mfcc.labels_ above, so this call is redundant (but
# harmless).
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0,
0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1,
1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1,
0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,
0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1,
1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,
1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,
0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1,
1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1,
0])
# Append the K-means assignment and the ground-truth label as extra columns
# of X so they can be cross-tabulated below.
X.loc[:, 'Cluster'] = clusters_mfcc
# list(y) assigns by position, deliberately ignoring y's index.
X.loc[:, 'chosen'] = [label for label in y]
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.784459 | 0.109342 | 0.681608 | 1.151904 | -0.707724 | 0.736365 | 0.241404 | -0.461605 | 1.084621 | 0.123497 | -0.150398 | 1.784532 | 0.824544 | 1 | 0 |
| 1 | 0.213444 | 0.453851 | 0.215705 | 0.987439 | -1.851748 | -0.001814 | -0.218203 | 0.548263 | -0.521851 | 1.253720 | 0.882518 | -0.892913 | 0.218184 | 1 | 0 |
| 2 | 1.478029 | 0.664210 | 0.484232 | 0.450284 | -0.427587 | -0.448079 | -0.195272 | -0.610679 | -0.675633 | 0.759098 | -0.107303 | -0.440790 | -1.398093 | 0 | 0 |
| 3 | -0.846386 | -0.742706 | -0.734786 | -0.199585 | -0.328948 | -0.230911 | 0.620910 | 0.911236 | 1.274407 | 0.150882 | -0.603865 | -0.919849 | 0.386069 | 0 | 0 |
| 4 | -0.952033 | -0.794706 | -1.141199 | -0.070979 | 0.454453 | 0.544600 | 1.427005 | 1.918539 | 1.204102 | 0.076961 | -0.328712 | -1.051776 | -0.151007 | 0 | 0 |
| 5 | -1.244473 | 2.250723 | 2.321007 | 0.549219 | 1.971653 | -1.427849 | 0.392314 | -1.015093 | 0.157566 | 0.486970 | -0.455070 | 0.229936 | 0.185742 | 1 | 0 |
| 6 | -0.128652 | 0.958605 | 1.191477 | 0.705974 | 0.793937 | -0.709130 | -0.221572 | 0.922219 | 0.500475 | -0.571099 | 0.521526 | 0.567361 | -0.486761 | 1 | 0 |
| 7 | -1.148662 | 0.562286 | 0.636059 | 0.222732 | 0.741470 | 0.009108 | -0.949365 | -0.486631 | -0.519627 | -0.739600 | -0.415884 | -0.026850 | 0.290685 | 1 | 0 |
| 8 | -0.440190 | 0.461355 | -0.016542 | -0.158636 | 0.985626 | -0.417310 | 0.182134 | 0.290631 | 0.379949 | 0.069159 | 1.080013 | 0.220566 | -0.042505 | 1 | 0 |
| 9 | -0.695730 | 0.398404 | 1.069978 | -0.124019 | 0.736597 | -0.912452 | 0.673156 | 0.604840 | 0.175505 | 0.496158 | 0.541788 | 0.649837 | -0.680054 | 1 | 0 |
| 10 | -0.006662 | 0.163832 | 1.373872 | -0.095120 | 1.621755 | 1.048509 | 0.997122 | 0.721763 | 0.660834 | -1.076324 | 0.925997 | -0.147393 | -0.420465 | 1 | 0 |
| 11 | -0.771763 | -0.484525 | -0.874411 | 0.647747 | -1.241650 | 0.190918 | 0.457290 | 0.915208 | 1.999689 | 1.879761 | 0.491598 | -0.164372 | -0.560754 | 1 | 0 |
| 12 | 0.140770 | 1.869847 | -1.926303 | -2.491201 | -2.679759 | -1.527330 | -0.299345 | -0.550878 | 0.702947 | 0.143961 | 0.034796 | -0.379551 | -0.422354 | 0 | 0 |
| 13 | -1.952477 | -0.949813 | 0.063314 | 1.188657 | 1.059601 | 1.221319 | 0.070346 | 2.284107 | 2.889527 | 2.012105 | 1.053494 | -0.178905 | -2.004333 | 1 | 0 |
| 14 | -0.895529 | 0.398850 | -0.469782 | 1.216393 | 0.657294 | -0.550619 | -0.854637 | -0.815454 | 1.929689 | 1.499328 | -0.096775 | -0.174183 | -1.119396 | 1 | 0 |
| 15 | -1.161372 | 1.475106 | 1.486594 | 0.127516 | 0.213940 | 0.587080 | -0.789652 | 0.130203 | 1.199389 | 1.458358 | 0.404206 | 0.754289 | -0.784214 | 1 | 0 |
| 16 | -0.476792 | 2.179287 | 0.101035 | -1.393755 | -0.740834 | 0.589666 | 0.873850 | 0.630539 | 0.535702 | 0.387326 | -0.979677 | 0.259755 | 0.313358 | 1 | 0 |
| 17 | -0.089088 | -0.841832 | 0.674093 | -0.842623 | 0.904577 | -1.476862 | 1.853427 | -1.108621 | 0.720923 | 0.383320 | -1.842030 | 1.712321 | -1.612726 | 0 | 0 |
| 18 | -1.772732 | 0.488101 | 0.057829 | 0.041074 | 0.732429 | 1.052187 | 0.279830 | -0.350521 | -0.476338 | -0.833438 | 0.184849 | -0.055428 | 0.627307 | 1 | 0 |
| 19 | -0.640351 | 0.068493 | 0.619966 | -0.599171 | 0.860806 | -0.385120 | 1.955087 | -1.014740 | 1.224043 | 1.450896 | -2.604448 | 2.187869 | -0.464774 | 1 | 0 |
| 20 | 0.590240 | 0.699904 | -0.097902 | 0.127319 | -0.882999 | 0.319144 | -0.146142 | -0.540616 | 0.300593 | 0.688863 | 0.314647 | 0.709538 | 0.572811 | 1 | 0 |
| 21 | 0.500240 | 0.875222 | -0.833826 | 0.377484 | 0.023480 | 1.321472 | 1.094037 | 0.734507 | 0.141947 | 0.214524 | 0.508556 | -0.265911 | -0.372316 | 1 | 0 |
| 22 | -0.076653 | 0.518030 | 0.003390 | 0.452969 | -0.218736 | 0.115409 | 0.332618 | 0.611098 | 0.211893 | -0.206368 | 0.358363 | 0.614915 | 0.518172 | 1 | 0 |
| 23 | 0.010763 | -0.352873 | -0.460051 | 0.423968 | -0.228393 | -0.040296 | -0.740869 | -0.810034 | -1.379366 | -0.179024 | 0.147810 | -0.224826 | 0.615011 | 1 | 0 |
| 24 | 0.874600 | 0.173728 | -1.041125 | 0.845285 | 1.139221 | 0.264458 | -0.378878 | 0.430226 | -0.568469 | -1.237333 | 0.032074 | 0.812111 | 0.431460 | 1 | 0 |
| 25 | 0.200637 | 0.337376 | 0.022126 | 1.189135 | -0.210135 | -1.195492 | 0.067874 | 1.349711 | -0.534365 | -0.132754 | 0.055132 | 0.239009 | -0.275633 | 1 | 0 |
| 26 | 0.362627 | 0.159292 | -1.211688 | -0.555502 | 0.107540 | 0.797027 | -0.246321 | -1.113565 | -1.373054 | -2.369077 | -0.539483 | 1.032005 | 1.637730 | 1 | 0 |
| 27 | -0.504648 | -0.561515 | -2.173809 | -1.525691 | -0.810132 | -0.617474 | 0.441103 | 1.146056 | 1.464488 | -1.111032 | -0.742722 | 0.034623 | 0.200147 | 0 | 0 |
| 28 | -0.339646 | -2.140319 | -1.409226 | -0.207553 | -1.216547 | -1.135346 | -0.831817 | 1.136334 | -0.187159 | 1.388841 | 0.282573 | -0.807850 | -0.371992 | 0 | 0 |
| 29 | -1.279089 | 1.555887 | 0.890503 | 2.134195 | 0.337580 | -0.037382 | -2.046955 | -2.888113 | 1.329665 | 1.436687 | -1.576201 | 0.485256 | 1.429246 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 279 | -1.132789 | -0.931481 | -0.350024 | -0.228575 | -1.201208 | -1.044342 | 0.532403 | 1.667036 | 1.383485 | -0.967474 | -0.286625 | -1.920618 | -0.797190 | 0 | 1 |
| 280 | -0.375948 | 0.058369 | 0.489068 | 0.862825 | -1.876102 | -0.195043 | -1.163295 | 0.716190 | 0.384576 | -0.168340 | 1.542126 | -0.769460 | 0.456686 | 1 | 1 |
| 281 | 0.412883 | -1.703432 | -0.514845 | -1.382818 | -0.713972 | -0.476089 | 1.471006 | 0.826485 | 0.508608 | -1.311788 | -2.010635 | -1.122699 | -0.848851 | 0 | 1 |
| 282 | -0.152329 | -2.012108 | -0.217355 | -1.122627 | -0.851075 | 0.634424 | 1.711007 | 0.281350 | -0.565156 | -1.667195 | -1.942452 | -1.586592 | -0.485128 | 0 | 1 |
| 283 | 0.348443 | -2.381428 | 1.267515 | -1.713290 | 0.161262 | -1.589515 | 1.383857 | -0.218429 | 0.412550 | 0.382171 | -1.073499 | -1.745128 | -3.227845 | 0 | 1 |
| 284 | -0.895866 | 1.001673 | 1.059356 | 0.166883 | -0.710729 | 0.466737 | -0.857566 | -0.158962 | 0.004241 | 0.391823 | 0.576231 | 0.329506 | -1.331272 | 1 | 1 |
| 285 | 0.417102 | 1.957515 | 2.350604 | -1.125042 | -2.206390 | -0.674814 | -1.217854 | 0.372865 | 0.840465 | -0.472910 | 0.310419 | 1.379494 | 1.128412 | 1 | 1 |
| 286 | -0.900897 | -0.289100 | 0.433265 | -0.281829 | -0.379951 | 1.272236 | 0.313949 | -0.261980 | -0.053111 | 0.473694 | 0.493962 | -0.263293 | -0.657598 | 1 | 1 |
| 287 | -0.002448 | -0.853612 | 0.441903 | 0.406478 | -0.823085 | 0.590185 | -0.292046 | -0.079952 | -0.422138 | 0.579522 | -0.620415 | -0.298847 | 0.620798 | 1 | 1 |
| 288 | -0.528092 | -1.022206 | -0.348679 | 0.093718 | -1.642833 | -2.355166 | -0.992806 | -0.143423 | 0.270521 | 0.838321 | 0.843686 | 0.469574 | -0.325121 | 0 | 1 |
| 289 | -0.387248 | -1.305014 | -0.365540 | 0.202745 | -0.906016 | -1.785190 | -1.377992 | -0.544742 | -0.670979 | -0.785606 | 0.505505 | 0.502505 | -0.151297 | 0 | 1 |
| 290 | 0.425324 | -2.583173 | -2.181080 | -1.262030 | -0.179265 | 0.176164 | 1.763096 | 0.436737 | -2.048534 | -1.014266 | 1.298221 | 0.401742 | -1.080608 | 0 | 1 |
| 291 | -0.572282 | -0.375532 | -2.067885 | -0.361247 | -0.315065 | -0.671820 | -0.183865 | -0.517694 | -0.802956 | -0.951809 | 0.282442 | 0.208005 | -0.271252 | 0 | 1 |
| 292 | -0.084382 | -1.508230 | -0.105496 | -1.930204 | -1.529664 | -0.795467 | 1.273717 | -1.858542 | -0.446361 | -0.239346 | 0.154464 | -0.114937 | -1.831603 | 0 | 1 |
| 293 | -1.172703 | 0.783209 | -1.141589 | -0.982768 | -0.513216 | 0.655437 | 1.962510 | 0.628858 | 1.130028 | 1.104741 | 1.539591 | 1.547843 | -0.011302 | 1 | 1 |
| 294 | -1.293038 | 0.838303 | -1.049071 | -0.708031 | -0.779995 | 0.868108 | 1.621994 | 0.725495 | 1.173585 | 1.424395 | 1.751950 | 1.352876 | 0.339922 | 1 | 1 |
| 295 | 0.809878 | -0.351504 | -2.231752 | -0.556719 | -1.430264 | -0.357918 | -0.727837 | 1.110363 | 1.684188 | 0.429768 | 0.560061 | 0.371789 | -1.110030 | 0 | 1 |
| 296 | 0.305449 | -0.148924 | -0.727054 | -0.126830 | 0.467272 | 0.420013 | 1.212777 | 0.954055 | -0.988419 | -0.423614 | -0.047239 | 0.058678 | -0.031517 | 0 | 1 |
| 297 | 1.212224 | 1.916789 | 0.287969 | -0.073842 | 0.289112 | 0.943764 | -0.395404 | -0.380613 | 0.262567 | 0.759137 | 0.277177 | 0.493951 | 1.026995 | 1 | 1 |
| 298 | -0.121307 | 0.217217 | 0.030920 | -0.201270 | -0.752001 | -0.276070 | 0.835502 | -0.363704 | -0.641199 | 0.283313 | 0.060013 | 0.013280 | 0.477857 | 1 | 1 |
| 299 | -0.622824 | -0.595352 | 0.256282 | -0.111551 | 0.023990 | 1.221659 | 1.572998 | -0.263983 | -0.707828 | 0.707801 | 0.306249 | 1.046476 | 0.214979 | 1 | 1 |
| 300 | -0.667480 | -0.808638 | 0.730781 | 0.054549 | 0.191421 | 0.279885 | 0.088177 | 0.823617 | 0.604299 | 0.640274 | -0.360151 | 1.298688 | 0.494875 | 1 | 1 |
| 301 | 0.928382 | -2.375767 | -0.427528 | -0.852350 | -1.137004 | 1.584181 | -1.700220 | -2.060965 | -1.326622 | 0.451948 | 0.593212 | 0.152418 | -0.128797 | 0 | 1 |
| 302 | -0.483888 | 0.443846 | 0.129714 | 0.199624 | -0.106985 | 0.817702 | -0.072817 | -1.163918 | 0.545762 | -0.141320 | 0.041767 | -0.402181 | 0.061897 | 1 | 1 |
| 303 | 0.715769 | 0.780533 | 1.467750 | -0.595580 | -1.178484 | 4.014345 | -0.112339 | -1.611382 | -0.295511 | 0.032462 | 1.836607 | -4.315898 | -1.084441 | 1 | 1 |
| 304 | 0.041466 | -0.470275 | 0.234655 | 0.109532 | -0.518455 | -0.977540 | -0.613498 | -1.108545 | 0.500653 | -0.214143 | -0.033265 | -0.541673 | 0.714974 | 1 | 1 |
| 305 | 0.818747 | 0.495675 | 1.005686 | 0.967334 | 0.505171 | -0.579478 | -0.847677 | 1.574323 | 1.544556 | 0.412556 | -0.972040 | 0.290457 | 0.289042 | 1 | 1 |
| 306 | 1.062928 | -1.149587 | 1.951840 | -0.065775 | 0.546680 | 0.994901 | -1.817826 | 2.109742 | 0.264443 | 0.505287 | -0.757462 | 0.578677 | 0.222503 | 1 | 1 |
| 307 | -0.701621 | -0.049803 | -0.719153 | -0.048069 | 1.223251 | 1.913492 | 0.887449 | 0.038186 | 0.546172 | -0.568362 | -1.091833 | -0.250367 | 0.831399 | 1 | 1 |
| 308 | -0.079821 | 0.796085 | -0.215763 | -1.396439 | -0.133350 | 0.582037 | 2.442796 | 0.743250 | -1.182753 | -0.723658 | -0.879934 | -2.498899 | -1.532262 | 0 | 1 |
309 rows × 15 columns
# Cross-tabulate cluster membership against the 'chosen' label and draw a
# stacked bar chart (one bar per cluster, one color per label value).
grouped = X.groupby(['chosen', 'Cluster'])
stacked = grouped.size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82b505400>
from IPython.display import display, Markdown, Latex
# Render a markdown section header for the 4th company (index 3).
display(Markdown('## '+companies[3]))
# Features: the standardized MFCC frame for company 3, minus the K-means
# 'Cluster' column; target: whether the track was chosen.
# NOTE(review): train_test_split is called without random_state, so the
# train/test partition changes on every run — confirm this is intended,
# otherwise downstream accuracy figures are not reproducible.
X = df_n_ps_std_mfcc[3].drop(columns='Cluster')
y = df_n_ps[3]['chosen']
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(139, 13)
# Base estimator; its hyperparameters are overridden by the grid search below.
mlp = MLPClassifier(hidden_layer_sizes=(30, 30, 30))

# Candidate values for the hyperparameter grid.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
start = time.time()  # wall-clock reference point for timing the grid search

np.random.seed(1234)
# Hyperparameter grid; batch_size is deliberately left out to keep the
# search tractable.
parametros = {'activation': activation_vec,
              'max_iter': max_iter_vec,
              'hidden_layer_sizes': hidden_layer_sizes_vec,
              'learning_rate_init': learning_rate_init_vec#,
              #'batch_size': batch_size_vec
             }
# Track both Cohen's kappa and plain accuracy; refit the winner on accuracy.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
# FIX: dropped the deprecated `iid=True` argument — it was deprecated in
# scikit-learn 0.22 and removed in 0.24, so the original call crashes on any
# modern version. Without it, fold scores are averaged unweighted.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1)
grid.fit(X_train, y_train)
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # time after the grid search has finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (30, 20, 10), 'learning_rate_init': 0.003, 'max_iter': 75}, que permiten obtener un Accuracy de 86.33% y un Kappa del 66.54
Tiempo total: 21.38 minutos
n0 = X_train.shape[1]  # input dimension (number of MFCC features)

### hidden_layer_sizes
# Layer widths: the best hidden sizes found by the grid search, followed by a
# single-unit output layer.
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']

# FIX: use the activation selected by the grid search for the hidden layers.
# The original hardcoded 'tanh', silently ignoring the tuned choice even
# though sizes, learning rate and epochs were all taken from best_params_.
# sklearn names the sigmoid 'logistic'; Keras names it 'sigmoid'.
best_activation = grid.best_params_['activation']
hidden_activation = 'sigmoid' if best_activation == 'logistic' else best_activation

input_tensor = Input(shape=(n0,))
hidden_outputs = [input_tensor]
for i in range(len(ns) - 1):
    hidden_outputs.append(Dense(ns[i], activation=hidden_activation)(hidden_outputs[i]))
# Sigmoid output for binary classification.
classification_output = Dense(ns[-1], activation='sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot of the freshly initialized weights
model.summary()
Model: "model_4" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_4 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_10 (Dense) (None, 30) 420 _________________________________________________________________ dense_11 (Dense) (None, 20) 620 _________________________________________________________________ dense_12 (Dense) (None, 10) 210 _________________________________________________________________ dense_13 (Dense) (None, 1) 11 ================================================================= Total params: 1,261 Trainable params: 1,261 Non-trainable params: 0 _________________________________________________________________
# Restore the weight snapshot taken right after construction, so re-running
# this cell always trains from the same starting point.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)  # lr = best learning rate from the grid search
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
# Train for the grid-search epoch budget; halve the learning rate whenever
# validation accuracy fails to improve by at least 0.01 for 10 epochs.
# NOTE(review): the test split doubles as the validation set here, so the
# reported val_accuracy is not an unbiased test estimate — confirm intended.
history = model.fit(X_train, y_train, epochs=epochs, validation_data=(X_test, y_test), batch_size= 32,
                    callbacks=[
                        keras.callbacks.ReduceLROnPlateau(
                            monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
                        )
                    ]
                   )
Train on 139 samples, validate on 47 samples Epoch 1/75 139/139 [==============================] - 0s 2ms/step - loss: 0.7120 - accuracy: 0.4604 - val_loss: 0.6643 - val_accuracy: 0.6383 Epoch 2/75 139/139 [==============================] - 0s 101us/step - loss: 0.5930 - accuracy: 0.7410 - val_loss: 0.6515 - val_accuracy: 0.6809 Epoch 3/75 139/139 [==============================] - 0s 93us/step - loss: 0.5191 - accuracy: 0.7626 - val_loss: 0.6451 - val_accuracy: 0.7234 Epoch 4/75 139/139 [==============================] - 0s 86us/step - loss: 0.4626 - accuracy: 0.7842 - val_loss: 0.6396 - val_accuracy: 0.7872 Epoch 5/75 139/139 [==============================] - 0s 86us/step - loss: 0.4188 - accuracy: 0.8201 - val_loss: 0.6337 - val_accuracy: 0.7660 Epoch 6/75 139/139 [==============================] - 0s 93us/step - loss: 0.3836 - accuracy: 0.8489 - val_loss: 0.6403 - val_accuracy: 0.7660 Epoch 7/75 139/139 [==============================] - 0s 101us/step - loss: 0.3544 - accuracy: 0.8705 - val_loss: 0.6423 - val_accuracy: 0.7660 Epoch 8/75 139/139 [==============================] - 0s 101us/step - loss: 0.3328 - accuracy: 0.8705 - val_loss: 0.6390 - val_accuracy: 0.7660 Epoch 9/75 139/139 [==============================] - 0s 101us/step - loss: 0.3117 - accuracy: 0.8777 - val_loss: 0.6345 - val_accuracy: 0.7660 Epoch 10/75 139/139 [==============================] - 0s 93us/step - loss: 0.3017 - accuracy: 0.8705 - val_loss: 0.6233 - val_accuracy: 0.7660 Epoch 11/75 139/139 [==============================] - 0s 129us/step - loss: 0.2924 - accuracy: 0.8849 - val_loss: 0.6335 - val_accuracy: 0.7872 Epoch 12/75 139/139 [==============================] - 0s 93us/step - loss: 0.2825 - accuracy: 0.8777 - val_loss: 0.6348 - val_accuracy: 0.7872 Epoch 13/75 139/139 [==============================] - 0s 86us/step - loss: 0.2727 - accuracy: 0.8777 - val_loss: 0.6450 - val_accuracy: 0.7872 Epoch 14/75 139/139 [==============================] - 0s 93us/step - loss: 0.2655 - 
accuracy: 0.8993 - val_loss: 0.6622 - val_accuracy: 0.7872 Epoch 00014: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. Epoch 15/75 139/139 [==============================] - 0s 93us/step - loss: 0.2583 - accuracy: 0.9065 - val_loss: 0.6709 - val_accuracy: 0.7872 Epoch 16/75 139/139 [==============================] - 0s 86us/step - loss: 0.2544 - accuracy: 0.9065 - val_loss: 0.6659 - val_accuracy: 0.7872 Epoch 17/75 139/139 [==============================] - 0s 101us/step - loss: 0.2510 - accuracy: 0.9065 - val_loss: 0.6681 - val_accuracy: 0.7660 Epoch 18/75 139/139 [==============================] - 0s 93us/step - loss: 0.2473 - accuracy: 0.9137 - val_loss: 0.6822 - val_accuracy: 0.7660 Epoch 19/75 139/139 [==============================] - 0s 86us/step - loss: 0.2434 - accuracy: 0.9137 - val_loss: 0.6909 - val_accuracy: 0.7660 Epoch 20/75 139/139 [==============================] - 0s 79us/step - loss: 0.2402 - accuracy: 0.9137 - val_loss: 0.6924 - val_accuracy: 0.7660 Epoch 21/75 139/139 [==============================] - 0s 137us/step - loss: 0.2373 - accuracy: 0.9137 - val_loss: 0.6904 - val_accuracy: 0.7660 Epoch 22/75 139/139 [==============================] - 0s 101us/step - loss: 0.2347 - accuracy: 0.9137 - val_loss: 0.6874 - val_accuracy: 0.7660 Epoch 23/75 139/139 [==============================] - 0s 93us/step - loss: 0.2313 - accuracy: 0.9137 - val_loss: 0.6854 - val_accuracy: 0.7660 Epoch 24/75 139/139 [==============================] - 0s 86us/step - loss: 0.2278 - accuracy: 0.9137 - val_loss: 0.6835 - val_accuracy: 0.7660 Epoch 00024: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 25/75 139/139 [==============================] - 0s 86us/step - loss: 0.2249 - accuracy: 0.9137 - val_loss: 0.6845 - val_accuracy: 0.7660 Epoch 26/75 139/139 [==============================] - 0s 101us/step - loss: 0.2242 - accuracy: 0.9137 - val_loss: 0.6798 - val_accuracy: 0.7660 Epoch 27/75 139/139 [==============================] - 0s 93us/step - loss: 0.2229 - accuracy: 0.9137 - val_loss: 0.6763 - val_accuracy: 0.7660 Epoch 28/75 139/139 [==============================] - 0s 101us/step - loss: 0.2213 - accuracy: 0.9137 - val_loss: 0.6758 - val_accuracy: 0.7660 Epoch 29/75 139/139 [==============================] - 0s 86us/step - loss: 0.2198 - accuracy: 0.9137 - val_loss: 0.6758 - val_accuracy: 0.7660 Epoch 30/75 139/139 [==============================] - 0s 86us/step - loss: 0.2179 - accuracy: 0.9209 - val_loss: 0.6753 - val_accuracy: 0.7660 Epoch 31/75 139/139 [==============================] - 0s 79us/step - loss: 0.2163 - accuracy: 0.9209 - val_loss: 0.6761 - val_accuracy: 0.7660 Epoch 32/75 139/139 [==============================] - 0s 86us/step - loss: 0.2150 - accuracy: 0.9209 - val_loss: 0.6781 - val_accuracy: 0.7660 Epoch 33/75 139/139 [==============================] - 0s 122us/step - loss: 0.2136 - accuracy: 0.9281 - val_loss: 0.6806 - val_accuracy: 0.7660 Epoch 34/75 139/139 [==============================] - 0s 158us/step - loss: 0.2121 - accuracy: 0.9281 - val_loss: 0.6834 - val_accuracy: 0.7660 Epoch 00034: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 35/75 139/139 [==============================] - 0s 129us/step - loss: 0.2106 - accuracy: 0.9281 - val_loss: 0.6837 - val_accuracy: 0.7660 Epoch 36/75 139/139 [==============================] - 0s 108us/step - loss: 0.2099 - accuracy: 0.9281 - val_loss: 0.6845 - val_accuracy: 0.7660 Epoch 37/75 139/139 [==============================] - 0s 108us/step - loss: 0.2092 - accuracy: 0.9281 - val_loss: 0.6846 - val_accuracy: 0.7660 Epoch 38/75 139/139 [==============================] - 0s 93us/step - loss: 0.2085 - accuracy: 0.9281 - val_loss: 0.6834 - val_accuracy: 0.7660 Epoch 39/75 139/139 [==============================] - 0s 93us/step - loss: 0.2078 - accuracy: 0.9281 - val_loss: 0.6844 - val_accuracy: 0.7660 Epoch 40/75 139/139 [==============================] - 0s 93us/step - loss: 0.2070 - accuracy: 0.9281 - val_loss: 0.6853 - val_accuracy: 0.7660 Epoch 41/75 139/139 [==============================] - 0s 93us/step - loss: 0.2064 - accuracy: 0.9281 - val_loss: 0.6865 - val_accuracy: 0.7660 Epoch 42/75 139/139 [==============================] - 0s 93us/step - loss: 0.2055 - accuracy: 0.9281 - val_loss: 0.6884 - val_accuracy: 0.7660 Epoch 43/75 139/139 [==============================] - 0s 93us/step - loss: 0.2050 - accuracy: 0.9281 - val_loss: 0.6905 - val_accuracy: 0.7660 Epoch 44/75 139/139 [==============================] - 0s 93us/step - loss: 0.2046 - accuracy: 0.9281 - val_loss: 0.6924 - val_accuracy: 0.7660 Epoch 00044: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 45/75 139/139 [==============================] - 0s 129us/step - loss: 0.2036 - accuracy: 0.9281 - val_loss: 0.6908 - val_accuracy: 0.7660 Epoch 46/75 139/139 [==============================] - 0s 108us/step - loss: 0.2031 - accuracy: 0.9281 - val_loss: 0.6889 - val_accuracy: 0.7660 Epoch 47/75 139/139 [==============================] - 0s 86us/step - loss: 0.2027 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 48/75 139/139 [==============================] - 0s 72us/step - loss: 0.2024 - accuracy: 0.9281 - val_loss: 0.6861 - val_accuracy: 0.7660 Epoch 49/75 139/139 [==============================] - 0s 93us/step - loss: 0.2021 - accuracy: 0.9281 - val_loss: 0.6861 - val_accuracy: 0.7660 Epoch 50/75 139/139 [==============================] - 0s 79us/step - loss: 0.2016 - accuracy: 0.9281 - val_loss: 0.6864 - val_accuracy: 0.7660 Epoch 51/75 139/139 [==============================] - 0s 86us/step - loss: 0.2013 - accuracy: 0.9281 - val_loss: 0.6864 - val_accuracy: 0.7660 Epoch 52/75 139/139 [==============================] - 0s 79us/step - loss: 0.2010 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 53/75 139/139 [==============================] - 0s 79us/step - loss: 0.2006 - accuracy: 0.9281 - val_loss: 0.6876 - val_accuracy: 0.7660 Epoch 54/75 139/139 [==============================] - 0s 72us/step - loss: 0.2003 - accuracy: 0.9281 - val_loss: 0.6873 - val_accuracy: 0.7660 Epoch 00054: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 55/75 139/139 [==============================] - 0s 72us/step - loss: 0.1998 - accuracy: 0.9281 - val_loss: 0.6875 - val_accuracy: 0.7660 Epoch 56/75 139/139 [==============================] - 0s 122us/step - loss: 0.1997 - accuracy: 0.9281 - val_loss: 0.6872 - val_accuracy: 0.7660 Epoch 57/75 139/139 [==============================] - 0s 86us/step - loss: 0.1995 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 58/75 139/139 [==============================] - 0s 79us/step - loss: 0.1993 - accuracy: 0.9281 - val_loss: 0.6874 - val_accuracy: 0.7660 Epoch 59/75 139/139 [==============================] - 0s 79us/step - loss: 0.1991 - accuracy: 0.9281 - val_loss: 0.6880 - val_accuracy: 0.7660 Epoch 60/75 139/139 [==============================] - 0s 72us/step - loss: 0.1990 - accuracy: 0.9281 - val_loss: 0.6874 - val_accuracy: 0.7660 Epoch 61/75 139/139 [==============================] - 0s 79us/step - loss: 0.1987 - accuracy: 0.9281 - val_loss: 0.6869 - val_accuracy: 0.7660 Epoch 62/75 139/139 [==============================] - 0s 79us/step - loss: 0.1986 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 63/75 139/139 [==============================] - 0s 72us/step - loss: 0.1983 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 64/75 139/139 [==============================] - 0s 79us/step - loss: 0.1981 - accuracy: 0.9281 - val_loss: 0.6868 - val_accuracy: 0.7660 Epoch 00064: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 65/75 139/139 [==============================] - 0s 79us/step - loss: 0.1980 - accuracy: 0.9281 - val_loss: 0.6868 - val_accuracy: 0.7660 Epoch 66/75 139/139 [==============================] - 0s 79us/step - loss: 0.1979 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 67/75 139/139 [==============================] - 0s 79us/step - loss: 0.1978 - accuracy: 0.9281 - val_loss: 0.6866 - val_accuracy: 0.7660 Epoch 68/75 139/139 [==============================] - 0s 101us/step - loss: 0.1977 - accuracy: 0.9281 - val_loss: 0.6866 - val_accuracy: 0.7660 Epoch 69/75 139/139 [==============================] - 0s 122us/step - loss: 0.1977 - accuracy: 0.9281 - val_loss: 0.6867 - val_accuracy: 0.7660 Epoch 70/75 139/139 [==============================] - 0s 86us/step - loss: 0.1975 - accuracy: 0.9281 - val_loss: 0.6870 - val_accuracy: 0.7660 Epoch 71/75 139/139 [==============================] - 0s 86us/step - loss: 0.1974 - accuracy: 0.9281 - val_loss: 0.6871 - val_accuracy: 0.7660 Epoch 72/75 139/139 [==============================] - 0s 72us/step - loss: 0.1973 - accuracy: 0.9281 - val_loss: 0.6873 - val_accuracy: 0.7660 Epoch 73/75 139/139 [==============================] - 0s 79us/step - loss: 0.1972 - accuracy: 0.9281 - val_loss: 0.6872 - val_accuracy: 0.7660 Epoch 74/75 139/139 [==============================] - 0s 79us/step - loss: 0.1972 - accuracy: 0.9281 - val_loss: 0.6873 - val_accuracy: 0.7660 Epoch 00074: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. Epoch 75/75 139/139 [==============================] - 0s 93us/step - loss: 0.1971 - accuracy: 0.9281 - val_loss: 0.6875 - val_accuracy: 0.7660
# Plot the Keras training history: accuracy and loss curves for the
# training and validation splits, one figure each.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))
print(epochs)
# Accuracy curves on their own explicit figure (the original relied on
# implicit figure state between show() calls) with labeled axes.
plt.figure()
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
# Loss curves on a fresh figure so the two plots never share state.
plt.figure()
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 75)
# Score the trained network on the held-out test split and report both metrics.
test_loss, test_acc = model.evaluate(X_test, y_test)
metrics_msg = "test loss: {}, test accuracy: {}".format(test_loss, test_acc)
print(metrics_msg)
47/47 [==============================] - 0s 106us/step test loss: 0.687487561017909, test accuracy: 0.7659574747085571
# Raw probability predictions feed the threshold-free AUC-ROC score.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.7903225806451613
# Binarise the predicted probabilities at the conventional 0.5 threshold
# (comprehension replaces the original map/lambda for readability), then
# score agreement beyond chance with Cohen's kappa.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ",cohen_kappa_score(y_test, y_pred))
Kappa: 0.396732788798133
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.311006 | 1.696486 | 0.912001 | -0.211934 | -0.513557 | 1.357699 | 0.777385 | 0.508585 | -2.290902 | -2.422249 | -0.738438 | -2.221933 | -1.191363 |
| 1 | 0.947147 | -0.576741 | -1.258913 | -0.786859 | 0.887961 | -1.895175 | -0.310427 | -0.374360 | 1.478062 | 0.972075 | -1.105984 | 0.612318 | -1.486887 |
| 2 | -0.115048 | 1.257050 | 0.043002 | -2.677464 | 3.902183 | -1.091787 | 0.505797 | 2.341684 | -2.345224 | -1.678088 | -2.608854 | -2.617777 | -2.135652 |
| 3 | 0.621032 | 1.417449 | 1.399722 | -0.625673 | 1.012110 | 0.230671 | -0.287988 | 1.012771 | -2.250326 | -0.340971 | -0.353905 | -0.717440 | -0.390485 |
| 4 | 0.340978 | 1.662814 | -1.775422 | 0.156552 | 1.678811 | 0.301711 | 2.038462 | 1.511985 | 1.508787 | -2.046602 | 0.741073 | -0.282747 | -0.779814 |
| 5 | 0.426765 | -1.056701 | -1.244088 | -0.696846 | -0.372415 | -0.847420 | -0.209607 | 1.462924 | -0.541420 | 0.000628 | -1.135148 | 1.608546 | 1.709532 |
| 6 | 0.210857 | -1.779497 | -2.206121 | -0.832640 | 0.636169 | -1.979858 | -0.510102 | 1.437770 | 0.128209 | 0.025521 | 0.184211 | 2.300204 | 0.912793 |
| 7 | -0.821293 | -0.049796 | 0.237440 | 0.379918 | 0.714133 | 0.670070 | 0.122605 | -0.069298 | -0.126759 | -0.199559 | 0.547891 | -0.099623 | -0.024895 |
| 8 | 0.420103 | -0.662020 | -0.550543 | -0.566406 | -0.923203 | -0.295152 | -0.533234 | 0.927026 | 0.119135 | 0.218761 | -0.245778 | 0.627242 | 1.313952 |
| 9 | -1.436247 | 0.435343 | 2.482690 | 1.099668 | -0.392845 | 0.565039 | 0.569531 | -0.088218 | -0.131137 | -0.699769 | -0.538549 | -0.329443 | 0.942919 |
| 10 | -1.574051 | -1.334372 | -1.636184 | 1.768991 | -0.369456 | -0.008046 | -1.402331 | 0.012625 | 1.135935 | 1.623145 | -0.653935 | 0.182348 | 1.052310 |
| 11 | -1.798986 | -1.632467 | -1.314854 | 2.656006 | -0.096678 | -0.174852 | -1.748372 | 0.185804 | 0.930317 | 0.365776 | -0.676448 | 0.358271 | 1.523770 |
| 12 | -0.708207 | 0.931180 | 0.258840 | -0.189291 | -0.204832 | -0.103872 | 0.221697 | -0.231695 | -0.003439 | 0.423528 | 1.259835 | 0.119625 | -0.192417 |
| 13 | -2.007033 | -0.288096 | 0.099713 | 0.390909 | 1.333138 | -0.069950 | 0.643074 | 0.172080 | -0.109666 | 0.304475 | -1.157528 | -1.708326 | -1.420079 |
| 14 | -0.497985 | 0.020592 | -0.123619 | 0.165046 | -0.765078 | -0.465219 | 0.172533 | 0.722853 | 0.284863 | -0.035284 | 0.024769 | -0.065990 | -0.992437 |
| 15 | 1.200625 | 0.984580 | -0.234312 | 0.348855 | 0.175663 | 0.309396 | 0.390611 | -0.745912 | -0.667554 | -0.052439 | 0.119610 | -0.862930 | 0.945979 |
| 16 | 0.435253 | 3.280178 | 0.407736 | 1.143148 | 2.291571 | 0.546530 | 0.170667 | 0.427708 | -0.063936 | -0.532360 | 0.404150 | 0.415849 | 0.869331 |
| 17 | -0.398944 | 0.035026 | -1.634042 | -1.354378 | 0.854385 | 1.406182 | -0.773335 | 0.663902 | 0.928496 | 1.278830 | 0.464511 | 0.235475 | -0.040374 |
| 18 | -0.454008 | -0.234096 | -0.930672 | -0.507506 | 0.545773 | 0.437756 | 1.026910 | 0.013959 | -0.620099 | -0.593763 | 1.073690 | 0.594340 | 0.987056 |
| 19 | 0.149846 | 0.062252 | -0.002122 | 0.786346 | 0.810930 | 0.304880 | -0.882886 | -0.043156 | 2.503584 | 0.894947 | 0.394981 | 0.761651 | 0.402963 |
| 20 | -0.314274 | 0.446482 | 0.889744 | 0.891114 | 1.249237 | 0.718469 | 0.296834 | -0.831548 | -0.393364 | -0.103574 | 0.295790 | 0.092061 | 0.424633 |
| 21 | 0.659365 | 1.053258 | -0.877939 | -0.295954 | -1.122110 | -0.035202 | 1.512616 | 0.031457 | -0.700740 | -1.687204 | -1.136215 | -1.545451 | -0.082548 |
| 22 | 0.568507 | -0.357318 | -1.183577 | -0.069205 | 0.462644 | -0.956011 | 0.501504 | 0.240708 | -0.025482 | 0.416003 | 0.237690 | -0.566935 | -0.846151 |
| 23 | 0.696474 | 0.477607 | -1.637469 | -1.158983 | -2.224208 | -1.861929 | -0.176558 | 0.694585 | 0.426826 | -0.088376 | -0.335290 | 1.125320 | 0.705700 |
| 24 | -0.221795 | -0.513464 | -0.506448 | 0.594506 | 0.033232 | -1.141879 | -1.582503 | -0.081204 | -0.001962 | -0.704687 | -0.473528 | 0.580117 | 1.533686 |
| 25 | 0.036099 | -0.007586 | 0.116729 | 0.438081 | -1.526141 | -1.994283 | -1.014100 | 0.028630 | -0.553238 | -0.540795 | 0.467730 | 0.943285 | 0.498193 |
| 26 | -0.291576 | -0.372192 | -1.176599 | 0.078535 | 0.516288 | -1.851892 | -2.218803 | 0.335200 | 0.323222 | 0.006649 | 0.017717 | 0.133172 | 1.208725 |
| 27 | 0.953536 | 0.427304 | -0.554063 | 0.425439 | 1.368674 | 0.362392 | 0.477030 | -0.976616 | -0.382390 | 0.310619 | -0.903078 | -0.943886 | -0.047616 |
| 28 | -1.172014 | 1.307258 | -1.059323 | -0.655908 | 1.591107 | 0.483432 | 0.474862 | 0.348014 | -0.527448 | 0.798802 | -0.075253 | 1.943808 | 0.108268 |
| 29 | -0.954427 | 0.000731 | -0.367958 | 0.281024 | 0.303337 | 0.744504 | 1.271647 | 0.298340 | -0.057042 | -0.297712 | -0.053703 | -0.045043 | -0.561554 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 156 | 0.232363 | -1.167339 | -0.114632 | 1.240724 | -0.209611 | 0.597503 | -0.105216 | -0.393018 | -0.168804 | -0.038601 | 0.602075 | -0.482733 | 0.192333 |
| 157 | -1.686193 | -0.806140 | -0.531342 | -0.411912 | 0.312945 | 0.751058 | 0.624837 | -0.394463 | 0.549120 | -1.174079 | -1.374572 | -1.950144 | -0.652535 |
| 158 | 0.487798 | 1.116042 | -0.308817 | 0.175231 | -0.191701 | -0.682970 | 0.502123 | 0.749073 | 1.365476 | 0.198244 | 1.283992 | 0.132188 | 0.482532 |
| 159 | 1.049575 | 0.742765 | 0.000505 | 0.670386 | 0.235663 | -0.297404 | 0.891743 | 0.047729 | 0.086633 | 0.873400 | 0.552393 | 0.496793 | 0.659122 |
| 160 | 0.285967 | 0.602916 | -0.009050 | 0.802464 | 0.333031 | -1.182611 | 0.473870 | 0.896236 | 0.890391 | 0.208214 | 0.786475 | 0.044481 | -0.114927 |
| 161 | 2.568510 | -0.180837 | 0.794882 | 1.410838 | 0.898076 | 0.468184 | 0.963255 | 0.338074 | 2.081580 | 2.353196 | 0.146660 | -0.295606 | -0.020484 |
| 162 | 0.821849 | 0.906757 | 0.282262 | 0.304716 | -0.691824 | 0.772704 | 2.543328 | -0.404440 | 1.861464 | 1.635426 | 0.204673 | 0.084333 | 0.469447 |
| 163 | 2.581037 | 0.239015 | 1.212048 | 0.498566 | 0.095720 | 0.062469 | 3.463238 | 0.374969 | -0.054235 | -0.365031 | -0.169020 | 1.160964 | 0.666076 |
| 164 | -0.247271 | -0.874145 | -0.840584 | 0.233138 | 0.034101 | 0.259892 | 0.144353 | -0.570094 | 1.244117 | 0.282845 | 0.127444 | -0.721587 | -1.450860 |
| 165 | 0.188979 | -0.519200 | 0.108496 | -0.513645 | -0.637646 | 0.812515 | 0.626360 | -0.156977 | -0.092241 | -0.517923 | 0.026563 | -0.597616 | -0.101096 |
| 166 | -0.015438 | -0.656621 | -0.739614 | 0.302131 | 0.583862 | 0.465267 | 0.342075 | -0.318902 | 0.221544 | 0.654368 | 0.777463 | -0.462212 | -0.867288 |
| 167 | -1.567081 | -1.052883 | -0.417918 | 0.636963 | -0.531279 | 0.787238 | -1.913461 | -0.020653 | -0.111129 | 0.112259 | -0.380422 | 0.497894 | 0.709826 |
| 168 | -1.883530 | -0.172892 | -0.340073 | -0.255266 | -0.480237 | -0.061425 | -0.158589 | -0.308725 | -0.034923 | 0.150845 | 0.696367 | 0.704196 | 0.473391 |
| 169 | -1.577057 | -0.602693 | 0.448785 | 1.073850 | -0.714538 | 1.427240 | -1.645225 | 0.812069 | -0.019466 | -0.719024 | -0.991241 | 0.521497 | 0.461555 |
| 170 | 0.667824 | -0.298287 | -0.412356 | -1.154598 | 0.171532 | -0.341146 | -0.411827 | -1.296671 | 0.428160 | -0.233124 | 0.035111 | -0.934501 | 0.123701 |
| 171 | 0.626343 | -0.046406 | -0.168999 | -1.278941 | 0.502027 | -0.060296 | 0.062571 | -1.284727 | 0.028380 | -0.488598 | -0.255008 | -1.199407 | 0.088115 |
| 172 | -0.441233 | -0.987406 | 0.015664 | -2.982951 | -0.379800 | 1.485748 | -2.174788 | -3.521120 | -0.191502 | 2.486906 | 2.771782 | 0.789523 | 0.553119 |
| 173 | 0.331514 | 0.334707 | -0.187508 | 0.489055 | 1.133140 | 1.016598 | -0.572753 | -0.634721 | -0.567790 | -0.492410 | 0.634219 | -0.122575 | -0.205540 |
| 174 | 0.480131 | 0.345645 | 0.202709 | -0.423456 | 1.184414 | 2.116965 | -0.463050 | 0.212550 | 1.973473 | -0.996794 | 0.924229 | -0.170049 | -0.068464 |
| 175 | 1.039410 | -0.773764 | 0.113739 | -0.796036 | -1.053802 | -1.238009 | 0.153897 | 0.497600 | 1.347261 | 0.972165 | 0.993095 | 0.066125 | -0.069772 |
| 176 | 0.194148 | -0.229033 | -0.571129 | -0.704359 | -0.204400 | -0.273049 | 1.105329 | 0.052851 | -0.360196 | 0.099095 | 1.063628 | -0.266594 | 0.041526 |
| 177 | -0.169188 | 0.025195 | -0.189648 | 0.376353 | 0.802036 | -1.185140 | 0.488985 | 0.244963 | 1.305631 | 0.241661 | 0.402415 | -0.494815 | 0.252206 |
| 178 | 1.433007 | 0.217051 | -0.388425 | -1.158798 | 2.068592 | 1.278810 | -1.193547 | -0.909321 | -0.207122 | 2.062093 | 1.374797 | 0.383804 | 1.569650 |
| 179 | 1.371536 | 0.624596 | -0.082552 | 0.444824 | 1.959112 | -0.736647 | -1.448177 | 0.624897 | 1.304939 | -0.025270 | -0.519401 | 0.592135 | 1.312240 |
| 180 | 0.815061 | -1.210119 | 0.844643 | -1.152602 | -0.216878 | -1.573232 | -0.065062 | 2.136014 | -0.285964 | 1.827988 | -0.982121 | 1.139199 | 0.936226 |
| 181 | 0.814962 | -1.028970 | -1.340094 | -1.579784 | 0.774822 | -0.351654 | -2.148181 | 2.772395 | 1.638263 | -0.394371 | 1.796246 | 1.182459 | 0.824064 |
| 182 | 1.615277 | 0.706391 | -0.611277 | 0.513438 | 0.987249 | 1.226124 | 0.240966 | 0.485917 | 1.355615 | -0.480955 | -0.255325 | -0.370864 | 0.107591 |
| 183 | 0.290224 | 0.578762 | 0.024629 | 0.119894 | 0.626180 | 1.025427 | 0.180541 | -0.504388 | -1.085411 | -1.413825 | 0.811722 | 0.640653 | 0.433677 |
| 184 | 0.086408 | -1.394139 | -0.501233 | 1.251905 | -0.481983 | 0.026482 | -1.317983 | -0.580623 | -0.160381 | -0.718194 | 0.110108 | -0.183905 | 0.074891 |
| 185 | 0.024909 | -0.713904 | -1.235134 | -0.194562 | 0.155358 | -0.586587 | -0.455970 | 0.577457 | 1.172268 | 0.468799 | 0.500130 | 1.133624 | 0.192845 |
186 rows × 13 columns
# Elbow-method scan: fit K-Means for k = 1..14 with a fixed seed and record
# the within-cluster sum of squares (inertia) of each fit.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
[2418.0, 2172.1027618675616, 1998.025251732407, 1881.5714074571665, 1790.4988909951144, 1710.5509624175916, 1638.1160961205187, 1594.2979605381602, 1541.5219098889218, 1492.348355544395, 1435.6626781919845, 1397.526425851007, 1339.3046499586067, 1333.0833102955885]
# Elbow plot: inertia vs. number of clusters. Markers and axis labels make
# the elbow easier to read (the original plot had neither).
plt.figure(figsize=(12,12))
plt.plot(range(1, 15), WSSs, marker='o')
plt.xlabel('Number of clusters (k)')
plt.ylabel('Within-cluster sum of squares (inertia)')
plt.title('Elbow method for choosing k')
[<matplotlib.lines.Line2D at 0x1e82cc61860>]
# Number of clusters chosen from the elbow plot above.
K = 3
# Pass K instead of repeating the literal 3 so the constant and the model
# can never drift apart.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1,
1, 2, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 1, 1, 1,
1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 2,
2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2,
0, 1, 1, 0, 1, 0, 2, 2, 2, 1, 2, 2, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1,
2, 2, 2, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 1, 0, 2, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 0,
1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2,
1, 0, 0, 0, 2, 2, 0, 0, 2, 2])
# kmeans_mfcc was fit on X itself, so labels_ already holds the cluster
# assignment for every row — reuse it instead of re-running predict(X).
# (The notebook outputs confirm the two arrays are identical.)
clusters_mfcc = kmeans_mfcc.labels_
clusters_mfcc
array([1, 2, 1, 1, 0, 2, 2, 0, 2, 0, 2, 2, 0, 1, 1, 0, 0, 2, 0, 0, 0, 1,
1, 2, 2, 2, 2, 0, 0, 1, 2, 2, 2, 1, 1, 0, 2, 2, 2, 0, 2, 1, 1, 1,
1, 2, 0, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0, 0, 2, 1, 2,
2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1, 0, 2, 1, 0, 2, 1, 0, 1, 2, 2,
0, 1, 1, 0, 1, 0, 2, 2, 2, 1, 2, 2, 0, 1, 2, 1, 2, 2, 2, 1, 1, 1,
2, 2, 2, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 1, 0, 2, 1, 1, 0,
0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 2, 1, 2, 2, 1, 0, 2, 2, 1, 0, 1, 0,
1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 1, 1, 2, 0, 0, 2,
1, 0, 0, 0, 2, 2, 0, 0, 2, 2])
# Attach the cluster id and the target label as extra columns so the
# cluster/label composition can be inspected side by side.
X['Cluster'] = clusters_mfcc
X['chosen'] = y.values
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.311006 | 1.696486 | 0.912001 | -0.211934 | -0.513557 | 1.357699 | 0.777385 | 0.508585 | -2.290902 | -2.422249 | -0.738438 | -2.221933 | -1.191363 | 1 | 0 |
| 1 | 0.947147 | -0.576741 | -1.258913 | -0.786859 | 0.887961 | -1.895175 | -0.310427 | -0.374360 | 1.478062 | 0.972075 | -1.105984 | 0.612318 | -1.486887 | 2 | 0 |
| 2 | -0.115048 | 1.257050 | 0.043002 | -2.677464 | 3.902183 | -1.091787 | 0.505797 | 2.341684 | -2.345224 | -1.678088 | -2.608854 | -2.617777 | -2.135652 | 1 | 0 |
| 3 | 0.621032 | 1.417449 | 1.399722 | -0.625673 | 1.012110 | 0.230671 | -0.287988 | 1.012771 | -2.250326 | -0.340971 | -0.353905 | -0.717440 | -0.390485 | 1 | 0 |
| 4 | 0.340978 | 1.662814 | -1.775422 | 0.156552 | 1.678811 | 0.301711 | 2.038462 | 1.511985 | 1.508787 | -2.046602 | 0.741073 | -0.282747 | -0.779814 | 0 | 0 |
| 5 | 0.426765 | -1.056701 | -1.244088 | -0.696846 | -0.372415 | -0.847420 | -0.209607 | 1.462924 | -0.541420 | 0.000628 | -1.135148 | 1.608546 | 1.709532 | 2 | 0 |
| 6 | 0.210857 | -1.779497 | -2.206121 | -0.832640 | 0.636169 | -1.979858 | -0.510102 | 1.437770 | 0.128209 | 0.025521 | 0.184211 | 2.300204 | 0.912793 | 2 | 0 |
| 7 | -0.821293 | -0.049796 | 0.237440 | 0.379918 | 0.714133 | 0.670070 | 0.122605 | -0.069298 | -0.126759 | -0.199559 | 0.547891 | -0.099623 | -0.024895 | 0 | 0 |
| 8 | 0.420103 | -0.662020 | -0.550543 | -0.566406 | -0.923203 | -0.295152 | -0.533234 | 0.927026 | 0.119135 | 0.218761 | -0.245778 | 0.627242 | 1.313952 | 2 | 0 |
| 9 | -1.436247 | 0.435343 | 2.482690 | 1.099668 | -0.392845 | 0.565039 | 0.569531 | -0.088218 | -0.131137 | -0.699769 | -0.538549 | -0.329443 | 0.942919 | 0 | 0 |
| 10 | -1.574051 | -1.334372 | -1.636184 | 1.768991 | -0.369456 | -0.008046 | -1.402331 | 0.012625 | 1.135935 | 1.623145 | -0.653935 | 0.182348 | 1.052310 | 2 | 0 |
| 11 | -1.798986 | -1.632467 | -1.314854 | 2.656006 | -0.096678 | -0.174852 | -1.748372 | 0.185804 | 0.930317 | 0.365776 | -0.676448 | 0.358271 | 1.523770 | 2 | 0 |
| 12 | -0.708207 | 0.931180 | 0.258840 | -0.189291 | -0.204832 | -0.103872 | 0.221697 | -0.231695 | -0.003439 | 0.423528 | 1.259835 | 0.119625 | -0.192417 | 0 | 0 |
| 13 | -2.007033 | -0.288096 | 0.099713 | 0.390909 | 1.333138 | -0.069950 | 0.643074 | 0.172080 | -0.109666 | 0.304475 | -1.157528 | -1.708326 | -1.420079 | 1 | 0 |
| 14 | -0.497985 | 0.020592 | -0.123619 | 0.165046 | -0.765078 | -0.465219 | 0.172533 | 0.722853 | 0.284863 | -0.035284 | 0.024769 | -0.065990 | -0.992437 | 1 | 0 |
| 15 | 1.200625 | 0.984580 | -0.234312 | 0.348855 | 0.175663 | 0.309396 | 0.390611 | -0.745912 | -0.667554 | -0.052439 | 0.119610 | -0.862930 | 0.945979 | 0 | 0 |
| 16 | 0.435253 | 3.280178 | 0.407736 | 1.143148 | 2.291571 | 0.546530 | 0.170667 | 0.427708 | -0.063936 | -0.532360 | 0.404150 | 0.415849 | 0.869331 | 0 | 0 |
| 17 | -0.398944 | 0.035026 | -1.634042 | -1.354378 | 0.854385 | 1.406182 | -0.773335 | 0.663902 | 0.928496 | 1.278830 | 0.464511 | 0.235475 | -0.040374 | 2 | 0 |
| 18 | -0.454008 | -0.234096 | -0.930672 | -0.507506 | 0.545773 | 0.437756 | 1.026910 | 0.013959 | -0.620099 | -0.593763 | 1.073690 | 0.594340 | 0.987056 | 0 | 0 |
| 19 | 0.149846 | 0.062252 | -0.002122 | 0.786346 | 0.810930 | 0.304880 | -0.882886 | -0.043156 | 2.503584 | 0.894947 | 0.394981 | 0.761651 | 0.402963 | 0 | 0 |
| 20 | -0.314274 | 0.446482 | 0.889744 | 0.891114 | 1.249237 | 0.718469 | 0.296834 | -0.831548 | -0.393364 | -0.103574 | 0.295790 | 0.092061 | 0.424633 | 0 | 0 |
| 21 | 0.659365 | 1.053258 | -0.877939 | -0.295954 | -1.122110 | -0.035202 | 1.512616 | 0.031457 | -0.700740 | -1.687204 | -1.136215 | -1.545451 | -0.082548 | 1 | 0 |
| 22 | 0.568507 | -0.357318 | -1.183577 | -0.069205 | 0.462644 | -0.956011 | 0.501504 | 0.240708 | -0.025482 | 0.416003 | 0.237690 | -0.566935 | -0.846151 | 1 | 0 |
| 23 | 0.696474 | 0.477607 | -1.637469 | -1.158983 | -2.224208 | -1.861929 | -0.176558 | 0.694585 | 0.426826 | -0.088376 | -0.335290 | 1.125320 | 0.705700 | 2 | 0 |
| 24 | -0.221795 | -0.513464 | -0.506448 | 0.594506 | 0.033232 | -1.141879 | -1.582503 | -0.081204 | -0.001962 | -0.704687 | -0.473528 | 0.580117 | 1.533686 | 2 | 0 |
| 25 | 0.036099 | -0.007586 | 0.116729 | 0.438081 | -1.526141 | -1.994283 | -1.014100 | 0.028630 | -0.553238 | -0.540795 | 0.467730 | 0.943285 | 0.498193 | 2 | 0 |
| 26 | -0.291576 | -0.372192 | -1.176599 | 0.078535 | 0.516288 | -1.851892 | -2.218803 | 0.335200 | 0.323222 | 0.006649 | 0.017717 | 0.133172 | 1.208725 | 2 | 0 |
| 27 | 0.953536 | 0.427304 | -0.554063 | 0.425439 | 1.368674 | 0.362392 | 0.477030 | -0.976616 | -0.382390 | 0.310619 | -0.903078 | -0.943886 | -0.047616 | 0 | 0 |
| 28 | -1.172014 | 1.307258 | -1.059323 | -0.655908 | 1.591107 | 0.483432 | 0.474862 | 0.348014 | -0.527448 | 0.798802 | -0.075253 | 1.943808 | 0.108268 | 0 | 0 |
| 29 | -0.954427 | 0.000731 | -0.367958 | 0.281024 | 0.303337 | 0.744504 | 1.271647 | 0.298340 | -0.057042 | -0.297712 | -0.053703 | -0.045043 | -0.561554 | 1 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 156 | 0.232363 | -1.167339 | -0.114632 | 1.240724 | -0.209611 | 0.597503 | -0.105216 | -0.393018 | -0.168804 | -0.038601 | 0.602075 | -0.482733 | 0.192333 | 0 | 1 |
| 157 | -1.686193 | -0.806140 | -0.531342 | -0.411912 | 0.312945 | 0.751058 | 0.624837 | -0.394463 | 0.549120 | -1.174079 | -1.374572 | -1.950144 | -0.652535 | 1 | 1 |
| 158 | 0.487798 | 1.116042 | -0.308817 | 0.175231 | -0.191701 | -0.682970 | 0.502123 | 0.749073 | 1.365476 | 0.198244 | 1.283992 | 0.132188 | 0.482532 | 0 | 1 |
| 159 | 1.049575 | 0.742765 | 0.000505 | 0.670386 | 0.235663 | -0.297404 | 0.891743 | 0.047729 | 0.086633 | 0.873400 | 0.552393 | 0.496793 | 0.659122 | 0 | 1 |
| 160 | 0.285967 | 0.602916 | -0.009050 | 0.802464 | 0.333031 | -1.182611 | 0.473870 | 0.896236 | 0.890391 | 0.208214 | 0.786475 | 0.044481 | -0.114927 | 0 | 1 |
| 161 | 2.568510 | -0.180837 | 0.794882 | 1.410838 | 0.898076 | 0.468184 | 0.963255 | 0.338074 | 2.081580 | 2.353196 | 0.146660 | -0.295606 | -0.020484 | 0 | 1 |
| 162 | 0.821849 | 0.906757 | 0.282262 | 0.304716 | -0.691824 | 0.772704 | 2.543328 | -0.404440 | 1.861464 | 1.635426 | 0.204673 | 0.084333 | 0.469447 | 0 | 1 |
| 163 | 2.581037 | 0.239015 | 1.212048 | 0.498566 | 0.095720 | 0.062469 | 3.463238 | 0.374969 | -0.054235 | -0.365031 | -0.169020 | 1.160964 | 0.666076 | 0 | 1 |
| 164 | -0.247271 | -0.874145 | -0.840584 | 0.233138 | 0.034101 | 0.259892 | 0.144353 | -0.570094 | 1.244117 | 0.282845 | 0.127444 | -0.721587 | -1.450860 | 1 | 1 |
| 165 | 0.188979 | -0.519200 | 0.108496 | -0.513645 | -0.637646 | 0.812515 | 0.626360 | -0.156977 | -0.092241 | -0.517923 | 0.026563 | -0.597616 | -0.101096 | 1 | 1 |
| 166 | -0.015438 | -0.656621 | -0.739614 | 0.302131 | 0.583862 | 0.465267 | 0.342075 | -0.318902 | 0.221544 | 0.654368 | 0.777463 | -0.462212 | -0.867288 | 1 | 1 |
| 167 | -1.567081 | -1.052883 | -0.417918 | 0.636963 | -0.531279 | 0.787238 | -1.913461 | -0.020653 | -0.111129 | 0.112259 | -0.380422 | 0.497894 | 0.709826 | 2 | 1 |
| 168 | -1.883530 | -0.172892 | -0.340073 | -0.255266 | -0.480237 | -0.061425 | -0.158589 | -0.308725 | -0.034923 | 0.150845 | 0.696367 | 0.704196 | 0.473391 | 2 | 1 |
| 169 | -1.577057 | -0.602693 | 0.448785 | 1.073850 | -0.714538 | 1.427240 | -1.645225 | 0.812069 | -0.019466 | -0.719024 | -0.991241 | 0.521497 | 0.461555 | 2 | 1 |
| 170 | 0.667824 | -0.298287 | -0.412356 | -1.154598 | 0.171532 | -0.341146 | -0.411827 | -1.296671 | 0.428160 | -0.233124 | 0.035111 | -0.934501 | 0.123701 | 1 | 1 |
| 171 | 0.626343 | -0.046406 | -0.168999 | -1.278941 | 0.502027 | -0.060296 | 0.062571 | -1.284727 | 0.028380 | -0.488598 | -0.255008 | -1.199407 | 0.088115 | 1 | 1 |
| 172 | -0.441233 | -0.987406 | 0.015664 | -2.982951 | -0.379800 | 1.485748 | -2.174788 | -3.521120 | -0.191502 | 2.486906 | 2.771782 | 0.789523 | 0.553119 | 2 | 1 |
| 173 | 0.331514 | 0.334707 | -0.187508 | 0.489055 | 1.133140 | 1.016598 | -0.572753 | -0.634721 | -0.567790 | -0.492410 | 0.634219 | -0.122575 | -0.205540 | 0 | 1 |
| 174 | 0.480131 | 0.345645 | 0.202709 | -0.423456 | 1.184414 | 2.116965 | -0.463050 | 0.212550 | 1.973473 | -0.996794 | 0.924229 | -0.170049 | -0.068464 | 0 | 1 |
| 175 | 1.039410 | -0.773764 | 0.113739 | -0.796036 | -1.053802 | -1.238009 | 0.153897 | 0.497600 | 1.347261 | 0.972165 | 0.993095 | 0.066125 | -0.069772 | 2 | 1 |
| 176 | 0.194148 | -0.229033 | -0.571129 | -0.704359 | -0.204400 | -0.273049 | 1.105329 | 0.052851 | -0.360196 | 0.099095 | 1.063628 | -0.266594 | 0.041526 | 1 | 1 |
| 177 | -0.169188 | 0.025195 | -0.189648 | 0.376353 | 0.802036 | -1.185140 | 0.488985 | 0.244963 | 1.305631 | 0.241661 | 0.402415 | -0.494815 | 0.252206 | 0 | 1 |
| 178 | 1.433007 | 0.217051 | -0.388425 | -1.158798 | 2.068592 | 1.278810 | -1.193547 | -0.909321 | -0.207122 | 2.062093 | 1.374797 | 0.383804 | 1.569650 | 0 | 1 |
| 179 | 1.371536 | 0.624596 | -0.082552 | 0.444824 | 1.959112 | -0.736647 | -1.448177 | 0.624897 | 1.304939 | -0.025270 | -0.519401 | 0.592135 | 1.312240 | 0 | 1 |
| 180 | 0.815061 | -1.210119 | 0.844643 | -1.152602 | -0.216878 | -1.573232 | -0.065062 | 2.136014 | -0.285964 | 1.827988 | -0.982121 | 1.139199 | 0.936226 | 2 | 1 |
| 181 | 0.814962 | -1.028970 | -1.340094 | -1.579784 | 0.774822 | -0.351654 | -2.148181 | 2.772395 | 1.638263 | -0.394371 | 1.796246 | 1.182459 | 0.824064 | 2 | 1 |
| 182 | 1.615277 | 0.706391 | -0.611277 | 0.513438 | 0.987249 | 1.226124 | 0.240966 | 0.485917 | 1.355615 | -0.480955 | -0.255325 | -0.370864 | 0.107591 | 0 | 1 |
| 183 | 0.290224 | 0.578762 | 0.024629 | 0.119894 | 0.626180 | 1.025427 | 0.180541 | -0.504388 | -1.085411 | -1.413825 | 0.811722 | 0.640653 | 0.433677 | 0 | 1 |
| 184 | 0.086408 | -1.394139 | -0.501233 | 1.251905 | -0.481983 | 0.026482 | -1.317983 | -0.580623 | -0.160381 | -0.718194 | 0.110108 | -0.183905 | 0.074891 | 2 | 1 |
| 185 | 0.024909 | -0.713904 | -1.235134 | -0.194562 | 0.155358 | -0.586587 | -0.455970 | 0.577457 | 1.172268 | 0.468799 | 0.500130 | 1.133624 | 0.192845 | 2 | 1 |
186 rows × 15 columns
# Count tracks per (chosen, cluster) pair, pivot clusters onto the index,
# and draw a stacked bar chart of label composition within each cluster.
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df[[0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82cc917b8>
# Render a markdown section header for the 5th company (index 4).
# NOTE(review): Latex is imported but unused in this cell — possibly needed
# by a later cell; confirm before removing.
from IPython.display import display, Markdown, Latex
display(Markdown('## '+companies[4]))
# Select the standardised MFCC features and target for company index 4,
# dropping the cluster column added earlier, then split train/test.
idx = 4
X = df_n_ps_std_mfcc[idx].drop(columns='Cluster')
y = df_n_ps[idx]['chosen']
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(164, 13)
# Base estimator; the hidden_layer_sizes given here is a placeholder that
# the grid search below overrides.
mlp = MLPClassifier(hidden_layer_sizes=(30,30,30))
# Candidate hyperparameter values for the grid search.
activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [(10,), (20,), (30,), (10, 10), (20, 20), (30, 30), (20, 10),
(10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10)]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
# Prepared but not included in the search (its entry in the param grid is
# commented out) — TODO confirm this exclusion was intentional.
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time
# Time the grid search to report total wall-clock duration at the end.
start = time.time()  # current time in seconds since the Unix epoch (1 Jan 1970)
# Fix the RNG state so MLP weight initialisation is repeatable.
np.random.seed(1234)
# Hyperparameter grid; batch_size is deliberately commented out to keep the
# search tractable.
parametros = {'activation': activation_vec,
'max_iter':max_iter_vec,
'hidden_layer_sizes': hidden_layer_sizes_vec,
'learning_rate_init': learning_rate_init_vec#,
#'batch_size': batch_size_vec
}
# Track both Cohen's kappa and accuracy; refit the best model by accuracy.
scoring = {'kappa':make_scorer(cohen_kappa_score), 'accuracy':'accuracy'}
# NOTE(review): the iid= parameter was removed in scikit-learn 0.24 — this
# cell targets an older version; confirm before upgrading.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring, refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)
# Report the best hyperparameters with their cross-validated accuracy and kappa.
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))
end = time.time()  # time after the model search has finished
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
Los parámetros del mejor modelo fueron {'activation': 'relu', 'hidden_layer_sizes': (20, 20, 20), 'learning_rate_init': 0.003, 'max_iter': 400}, que permiten obtener un Accuracy de 72.56% y un Kappa del 45.12
Tiempo total: 20.25 minutos
# Pull the winning hyperparameters out of the fitted grid search.
n0 = X_train.shape[1]  # input dimension (number of MFCC features)
### hidden_layer_sizes
# Layer widths: the grid-searched hidden sizes plus one output unit
# (list(...) replaces the original index-by-index append loop).
ns = list(grid.best_params_['hidden_layer_sizes'])
ns.append(1)
lr = grid.best_params_['learning_rate_init']
epochs = grid.best_params_['max_iter']
# Rebuild the best MLP with the Keras functional API.
# The grid search selected an activation ('relu' in the run above), but the
# original cell hard-coded 'tanh' for every hidden layer. Use the searched
# activation instead, translating sklearn's 'logistic' to Keras' 'sigmoid'.
hidden_act = {'logistic': 'sigmoid', 'relu': 'relu', 'tanh': 'tanh'}[grid.best_params_['activation']]
input_tensor = Input(shape = (n0,))
hidden_outputs = [input_tensor]
# Chain one Dense layer per hidden width; ns[-1] is the single output unit.
for i in range (len(ns)-1):
    hidden_outputs.append(Dense(ns[i], activation = hidden_act)(hidden_outputs[i]))
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
weights = model.get_weights()  # snapshot the fresh initial weights for a clean re-train
model.summary()
Model: "model_5" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_5 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_14 (Dense) (None, 20) 280 _________________________________________________________________ dense_15 (Dense) (None, 20) 420 _________________________________________________________________ dense_16 (Dense) (None, 20) 420 _________________________________________________________________ dense_17 (Dense) (None, 1) 21 ================================================================= Total params: 1,141 Trainable params: 1,141 Non-trainable params: 0 _________________________________________________________________
# Restore the snapshotted initial weights, then train with Adam at the
# grid-searched learning rate, halving it whenever validation accuracy
# fails to improve by 0.01 for 10 epochs.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])
lr_scheduler = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
history = model.fit(
    X_train,
    y_train,
    epochs=epochs,
    validation_data=(X_test, y_test),
    batch_size=32,
    callbacks=[lr_scheduler],
)
Train on 164 samples, validate on 55 samples Epoch 1/400 164/164 [==============================] - 0s 1ms/step - loss: 0.7149 - accuracy: 0.5244 - val_loss: 0.7183 - val_accuracy: 0.5273 Epoch 2/400 164/164 [==============================] - 0s 67us/step - loss: 0.6628 - accuracy: 0.5854 - val_loss: 0.7022 - val_accuracy: 0.6182 Epoch 3/400 164/164 [==============================] - 0s 73us/step - loss: 0.6490 - accuracy: 0.6707 - val_loss: 0.6958 - val_accuracy: 0.6364 Epoch 4/400 164/164 [==============================] - 0s 67us/step - loss: 0.6445 - accuracy: 0.6707 - val_loss: 0.6991 - val_accuracy: 0.6182 Epoch 5/400 164/164 [==============================] - 0s 73us/step - loss: 0.6399 - accuracy: 0.6646 - val_loss: 0.6941 - val_accuracy: 0.6182 Epoch 6/400 164/164 [==============================] - 0s 73us/step - loss: 0.6294 - accuracy: 0.6951 - val_loss: 0.6918 - val_accuracy: 0.6182 Epoch 7/400 164/164 [==============================] - 0s 104us/step - loss: 0.6196 - accuracy: 0.6951 - val_loss: 0.6923 - val_accuracy: 0.6182 Epoch 8/400 164/164 [==============================] - 0s 73us/step - loss: 0.6167 - accuracy: 0.7134 - val_loss: 0.6890 - val_accuracy: 0.6000 Epoch 9/400 164/164 [==============================] - 0s 67us/step - loss: 0.6139 - accuracy: 0.6829 - val_loss: 0.6929 - val_accuracy: 0.6000 Epoch 10/400 164/164 [==============================] - 0s 73us/step - loss: 0.6108 - accuracy: 0.6890 - val_loss: 0.6929 - val_accuracy: 0.6182 Epoch 11/400 164/164 [==============================] - 0s 79us/step - loss: 0.6044 - accuracy: 0.7012 - val_loss: 0.6875 - val_accuracy: 0.6000 Epoch 12/400 164/164 [==============================] - 0s 79us/step - loss: 0.5918 - accuracy: 0.7073 - val_loss: 0.6807 - val_accuracy: 0.6545 Epoch 13/400 164/164 [==============================] - 0s 67us/step - loss: 0.5862 - accuracy: 0.7073 - val_loss: 0.6789 - val_accuracy: 0.6545 Epoch 14/400 164/164 [==============================] - 0s 110us/step - loss: 
0.5762 - accuracy: 0.6890 - val_loss: 0.6712 - val_accuracy: 0.6545 Epoch 15/400 164/164 [==============================] - 0s 73us/step - loss: 0.5665 - accuracy: 0.7073 - val_loss: 0.6710 - val_accuracy: 0.6727 Epoch 16/400 164/164 [==============================] - 0s 73us/step - loss: 0.5596 - accuracy: 0.7134 - val_loss: 0.6779 - val_accuracy: 0.6727 Epoch 17/400 164/164 [==============================] - 0s 73us/step - loss: 0.5537 - accuracy: 0.7012 - val_loss: 0.6888 - val_accuracy: 0.6727 Epoch 18/400 164/164 [==============================] - 0s 73us/step - loss: 0.5451 - accuracy: 0.7134 - val_loss: 0.6966 - val_accuracy: 0.6364 Epoch 19/400 164/164 [==============================] - 0s 79us/step - loss: 0.5349 - accuracy: 0.7256 - val_loss: 0.7024 - val_accuracy: 0.5818 Epoch 20/400 164/164 [==============================] - 0s 79us/step - loss: 0.5252 - accuracy: 0.7378 - val_loss: 0.7101 - val_accuracy: 0.6364 Epoch 21/400 164/164 [==============================] - 0s 104us/step - loss: 0.5172 - accuracy: 0.7256 - val_loss: 0.7167 - val_accuracy: 0.6182 Epoch 22/400 164/164 [==============================] - 0s 79us/step - loss: 0.5140 - accuracy: 0.7317 - val_loss: 0.7129 - val_accuracy: 0.6000 Epoch 23/400 164/164 [==============================] - 0s 73us/step - loss: 0.5070 - accuracy: 0.7378 - val_loss: 0.6954 - val_accuracy: 0.6000 Epoch 24/400 164/164 [==============================] - 0s 73us/step - loss: 0.4857 - accuracy: 0.7744 - val_loss: 0.6973 - val_accuracy: 0.5818 Epoch 25/400 164/164 [==============================] - 0s 79us/step - loss: 0.4784 - accuracy: 0.7927 - val_loss: 0.6920 - val_accuracy: 0.6182 Epoch 00025: ReduceLROnPlateau reducing learning rate to 0.001500000013038516. 
Epoch 26/400 164/164 [==============================] - 0s 73us/step - loss: 0.4691 - accuracy: 0.8110 - val_loss: 0.6896 - val_accuracy: 0.6364 Epoch 27/400 164/164 [==============================] - 0s 85us/step - loss: 0.4629 - accuracy: 0.8110 - val_loss: 0.6919 - val_accuracy: 0.6182 Epoch 28/400 164/164 [==============================] - 0s 85us/step - loss: 0.4541 - accuracy: 0.8110 - val_loss: 0.6785 - val_accuracy: 0.6364 Epoch 29/400 164/164 [==============================] - 0s 73us/step - loss: 0.4506 - accuracy: 0.8049 - val_loss: 0.6732 - val_accuracy: 0.6364 Epoch 30/400 164/164 [==============================] - 0s 79us/step - loss: 0.4403 - accuracy: 0.8171 - val_loss: 0.6752 - val_accuracy: 0.6182 Epoch 31/400 164/164 [==============================] - 0s 110us/step - loss: 0.4384 - accuracy: 0.8476 - val_loss: 0.6800 - val_accuracy: 0.6182 Epoch 32/400 164/164 [==============================] - 0s 79us/step - loss: 0.4345 - accuracy: 0.8537 - val_loss: 0.6872 - val_accuracy: 0.6182 Epoch 33/400 164/164 [==============================] - 0s 98us/step - loss: 0.4265 - accuracy: 0.8598 - val_loss: 0.6875 - val_accuracy: 0.6545 Epoch 34/400 164/164 [==============================] - 0s 79us/step - loss: 0.4183 - accuracy: 0.8476 - val_loss: 0.6883 - val_accuracy: 0.6727 Epoch 35/400 164/164 [==============================] - 0s 67us/step - loss: 0.4100 - accuracy: 0.8415 - val_loss: 0.6959 - val_accuracy: 0.6727 Epoch 00035: ReduceLROnPlateau reducing learning rate to 0.000750000006519258. 
Epoch 36/400 164/164 [==============================] - 0s 67us/step - loss: 0.4034 - accuracy: 0.8293 - val_loss: 0.6946 - val_accuracy: 0.7091 Epoch 37/400 164/164 [==============================] - 0s 67us/step - loss: 0.3991 - accuracy: 0.8293 - val_loss: 0.6968 - val_accuracy: 0.6909 Epoch 38/400 164/164 [==============================] - 0s 85us/step - loss: 0.3958 - accuracy: 0.8293 - val_loss: 0.6987 - val_accuracy: 0.6909 Epoch 39/400 164/164 [==============================] - 0s 97us/step - loss: 0.3918 - accuracy: 0.8354 - val_loss: 0.7003 - val_accuracy: 0.6909 Epoch 40/400 164/164 [==============================] - 0s 79us/step - loss: 0.3892 - accuracy: 0.8354 - val_loss: 0.7088 - val_accuracy: 0.6545 Epoch 41/400 164/164 [==============================] - 0s 73us/step - loss: 0.3852 - accuracy: 0.8354 - val_loss: 0.7138 - val_accuracy: 0.6545 Epoch 42/400 164/164 [==============================] - 0s 79us/step - loss: 0.3819 - accuracy: 0.8476 - val_loss: 0.7122 - val_accuracy: 0.6909 Epoch 43/400 164/164 [==============================] - 0s 79us/step - loss: 0.3787 - accuracy: 0.8598 - val_loss: 0.7089 - val_accuracy: 0.6909 Epoch 44/400 164/164 [==============================] - 0s 73us/step - loss: 0.3763 - accuracy: 0.8598 - val_loss: 0.7081 - val_accuracy: 0.6909 Epoch 45/400 164/164 [==============================] - 0s 73us/step - loss: 0.3726 - accuracy: 0.8598 - val_loss: 0.7091 - val_accuracy: 0.6727 Epoch 46/400 164/164 [==============================] - 0s 73us/step - loss: 0.3678 - accuracy: 0.8598 - val_loss: 0.7040 - val_accuracy: 0.6727 Epoch 00046: ReduceLROnPlateau reducing learning rate to 0.000375000003259629. 
Epoch 47/400 164/164 [==============================] - 0s 98us/step - loss: 0.3655 - accuracy: 0.8598 - val_loss: 0.7071 - val_accuracy: 0.6727 Epoch 48/400 164/164 [==============================] - 0s 85us/step - loss: 0.3630 - accuracy: 0.8598 - val_loss: 0.7106 - val_accuracy: 0.6545 Epoch 49/400 164/164 [==============================] - 0s 73us/step - loss: 0.3608 - accuracy: 0.8598 - val_loss: 0.7129 - val_accuracy: 0.6545 Epoch 50/400 164/164 [==============================] - 0s 79us/step - loss: 0.3591 - accuracy: 0.8659 - val_loss: 0.7160 - val_accuracy: 0.6364 Epoch 51/400 164/164 [==============================] - 0s 73us/step - loss: 0.3573 - accuracy: 0.8659 - val_loss: 0.7198 - val_accuracy: 0.6364 Epoch 52/400 164/164 [==============================] - 0s 79us/step - loss: 0.3555 - accuracy: 0.8659 - val_loss: 0.7224 - val_accuracy: 0.6545 Epoch 53/400 164/164 [==============================] - 0s 73us/step - loss: 0.3539 - accuracy: 0.8659 - val_loss: 0.7248 - val_accuracy: 0.6364 Epoch 54/400 164/164 [==============================] - 0s 85us/step - loss: 0.3524 - accuracy: 0.8659 - val_loss: 0.7281 - val_accuracy: 0.6364 Epoch 55/400 164/164 [==============================] - 0s 98us/step - loss: 0.3514 - accuracy: 0.8902 - val_loss: 0.7324 - val_accuracy: 0.6364 Epoch 56/400 164/164 [==============================] - 0s 79us/step - loss: 0.3504 - accuracy: 0.8841 - val_loss: 0.7356 - val_accuracy: 0.6364 Epoch 00056: ReduceLROnPlateau reducing learning rate to 0.0001875000016298145. 
Epoch 57/400 164/164 [==============================] - 0s 73us/step - loss: 0.3483 - accuracy: 0.8902 - val_loss: 0.7344 - val_accuracy: 0.6364 Epoch 58/400 164/164 [==============================] - 0s 79us/step - loss: 0.3472 - accuracy: 0.8902 - val_loss: 0.7329 - val_accuracy: 0.6364 Epoch 59/400 164/164 [==============================] - 0s 79us/step - loss: 0.3463 - accuracy: 0.8963 - val_loss: 0.7335 - val_accuracy: 0.6364 Epoch 60/400 164/164 [==============================] - 0s 73us/step - loss: 0.3449 - accuracy: 0.8963 - val_loss: 0.7348 - val_accuracy: 0.6364 Epoch 61/400 164/164 [==============================] - 0s 73us/step - loss: 0.3439 - accuracy: 0.8963 - val_loss: 0.7356 - val_accuracy: 0.6364 Epoch 62/400 164/164 [==============================] - 0s 73us/step - loss: 0.3427 - accuracy: 0.8902 - val_loss: 0.7366 - val_accuracy: 0.6364 Epoch 63/400 164/164 [==============================] - 0s 98us/step - loss: 0.3420 - accuracy: 0.8902 - val_loss: 0.7351 - val_accuracy: 0.6364 Epoch 64/400 164/164 [==============================] - 0s 79us/step - loss: 0.3409 - accuracy: 0.8902 - val_loss: 0.7358 - val_accuracy: 0.6364 Epoch 65/400 164/164 [==============================] - 0s 73us/step - loss: 0.3398 - accuracy: 0.8780 - val_loss: 0.7364 - val_accuracy: 0.6364 Epoch 66/400 164/164 [==============================] - 0s 73us/step - loss: 0.3389 - accuracy: 0.8780 - val_loss: 0.7359 - val_accuracy: 0.6545 Epoch 00066: ReduceLROnPlateau reducing learning rate to 9.375000081490725e-05. 
Epoch 67/400 164/164 [==============================] - 0s 128us/step - loss: 0.3380 - accuracy: 0.8841 - val_loss: 0.7359 - val_accuracy: 0.6545 Epoch 68/400 164/164 [==============================] - 0s 79us/step - loss: 0.3373 - accuracy: 0.8841 - val_loss: 0.7361 - val_accuracy: 0.6364 Epoch 69/400 164/164 [==============================] - 0s 79us/step - loss: 0.3372 - accuracy: 0.8902 - val_loss: 0.7360 - val_accuracy: 0.6364 Epoch 70/400 164/164 [==============================] - 0s 73us/step - loss: 0.3364 - accuracy: 0.8902 - val_loss: 0.7358 - val_accuracy: 0.6364 Epoch 71/400 164/164 [==============================] - 0s 79us/step - loss: 0.3359 - accuracy: 0.8902 - val_loss: 0.7358 - val_accuracy: 0.6364 Epoch 72/400 164/164 [==============================] - 0s 91us/step - loss: 0.3356 - accuracy: 0.8841 - val_loss: 0.7359 - val_accuracy: 0.6545 Epoch 73/400 164/164 [==============================] - 0s 85us/step - loss: 0.3349 - accuracy: 0.8841 - val_loss: 0.7369 - val_accuracy: 0.6545 Epoch 74/400 164/164 [==============================] - 0s 67us/step - loss: 0.3343 - accuracy: 0.8841 - val_loss: 0.7377 - val_accuracy: 0.6545 Epoch 75/400 164/164 [==============================] - 0s 85us/step - loss: 0.3341 - accuracy: 0.8780 - val_loss: 0.7388 - val_accuracy: 0.6364 Epoch 76/400 164/164 [==============================] - 0s 85us/step - loss: 0.3336 - accuracy: 0.8780 - val_loss: 0.7396 - val_accuracy: 0.6364 Epoch 00076: ReduceLROnPlateau reducing learning rate to 4.6875000407453626e-05. 
Epoch 77/400 164/164 [==============================] - 0s 73us/step - loss: 0.3332 - accuracy: 0.8841 - val_loss: 0.7399 - val_accuracy: 0.6364 Epoch 78/400 164/164 [==============================] - 0s 73us/step - loss: 0.3331 - accuracy: 0.8841 - val_loss: 0.7402 - val_accuracy: 0.6364 Epoch 79/400 164/164 [==============================] - 0s 73us/step - loss: 0.3328 - accuracy: 0.8841 - val_loss: 0.7402 - val_accuracy: 0.6364 Epoch 80/400 164/164 [==============================] - 0s 73us/step - loss: 0.3326 - accuracy: 0.8841 - val_loss: 0.7405 - val_accuracy: 0.6364 Epoch 81/400 164/164 [==============================] - 0s 73us/step - loss: 0.3324 - accuracy: 0.8841 - val_loss: 0.7409 - val_accuracy: 0.6364 Epoch 82/400 164/164 [==============================] - 0s 79us/step - loss: 0.3322 - accuracy: 0.8841 - val_loss: 0.7409 - val_accuracy: 0.6364 Epoch 83/400 164/164 [==============================] - 0s 79us/step - loss: 0.3319 - accuracy: 0.8902 - val_loss: 0.7408 - val_accuracy: 0.6364 Epoch 84/400 164/164 [==============================] - 0s 91us/step - loss: 0.3317 - accuracy: 0.8902 - val_loss: 0.7409 - val_accuracy: 0.6364 Epoch 85/400 164/164 [==============================] - 0s 79us/step - loss: 0.3315 - accuracy: 0.8902 - val_loss: 0.7411 - val_accuracy: 0.6364 Epoch 86/400 164/164 [==============================] - 0s 73us/step - loss: 0.3312 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 00086: ReduceLROnPlateau reducing learning rate to 2.3437500203726813e-05. 
Epoch 87/400 164/164 [==============================] - 0s 79us/step - loss: 0.3310 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 88/400 164/164 [==============================] - 0s 104us/step - loss: 0.3309 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 89/400 164/164 [==============================] - 0s 104us/step - loss: 0.3307 - accuracy: 0.8902 - val_loss: 0.7412 - val_accuracy: 0.6364 Epoch 90/400 164/164 [==============================] - 0s 91us/step - loss: 0.3306 - accuracy: 0.8902 - val_loss: 0.7413 - val_accuracy: 0.6364 Epoch 91/400 164/164 [==============================] - 0s 98us/step - loss: 0.3305 - accuracy: 0.8902 - val_loss: 0.7415 - val_accuracy: 0.6364 Epoch 92/400 164/164 [==============================] - 0s 73us/step - loss: 0.3305 - accuracy: 0.8963 - val_loss: 0.7418 - val_accuracy: 0.6364 Epoch 93/400 164/164 [==============================] - 0s 79us/step - loss: 0.3304 - accuracy: 0.8963 - val_loss: 0.7419 - val_accuracy: 0.6364 Epoch 94/400 164/164 [==============================] - 0s 79us/step - loss: 0.3302 - accuracy: 0.8963 - val_loss: 0.7420 - val_accuracy: 0.6364 Epoch 95/400 164/164 [==============================] - 0s 91us/step - loss: 0.3301 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6364 Epoch 96/400 164/164 [==============================] - 0s 79us/step - loss: 0.3300 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 00096: ReduceLROnPlateau reducing learning rate to 1.1718750101863407e-05. 
Epoch 97/400 164/164 [==============================] - 0s 79us/step - loss: 0.3299 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 98/400 164/164 [==============================] - 0s 79us/step - loss: 0.3298 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 99/400 164/164 [==============================] - 0s 91us/step - loss: 0.3298 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 100/400 164/164 [==============================] - 0s 79us/step - loss: 0.3297 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 101/400 164/164 [==============================] - 0s 79us/step - loss: 0.3297 - accuracy: 0.8963 - val_loss: 0.7422 - val_accuracy: 0.6545 Epoch 102/400 164/164 [==============================] - 0s 79us/step - loss: 0.3296 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 103/400 164/164 [==============================] - 0s 73us/step - loss: 0.3296 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 104/400 164/164 [==============================] - 0s 97us/step - loss: 0.3295 - accuracy: 0.8963 - val_loss: 0.7423 - val_accuracy: 0.6545 Epoch 105/400 164/164 [==============================] - 0s 79us/step - loss: 0.3295 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 106/400 164/164 [==============================] - 0s 67us/step - loss: 0.3294 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 00106: ReduceLROnPlateau reducing learning rate to 5.859375050931703e-06. 
Epoch 107/400 164/164 [==============================] - 0s 91us/step - loss: 0.3294 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 108/400 164/164 [==============================] - 0s 85us/step - loss: 0.3293 - accuracy: 0.8963 - val_loss: 0.7424 - val_accuracy: 0.6545 Epoch 109/400 164/164 [==============================] - 0s 79us/step - loss: 0.3293 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 110/400 164/164 [==============================] - 0s 79us/step - loss: 0.3293 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 111/400 164/164 [==============================] - 0s 91us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 112/400 164/164 [==============================] - 0s 79us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 113/400 164/164 [==============================] - 0s 79us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 114/400 164/164 [==============================] - 0s 85us/step - loss: 0.3292 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 115/400 164/164 [==============================] - 0s 85us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 116/400 164/164 [==============================] - 0s 85us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 00116: ReduceLROnPlateau reducing learning rate to 2.9296875254658516e-06. 
Epoch 117/400 164/164 [==============================] - 0s 73us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 118/400 164/164 [==============================] - 0s 85us/step - loss: 0.3291 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 119/400 164/164 [==============================] - 0s 79us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 120/400 164/164 [==============================] - 0s 73us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 121/400 164/164 [==============================] - 0s 79us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 122/400 164/164 [==============================] - 0s 98us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 123/400 164/164 [==============================] - 0s 79us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 124/400 164/164 [==============================] - 0s 73us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 125/400 164/164 [==============================] - 0s 73us/step - loss: 0.3290 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 126/400 164/164 [==============================] - 0s 104us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 00126: ReduceLROnPlateau reducing learning rate to 1.4648437627329258e-06. 
Epoch 127/400 164/164 [==============================] - 0s 79us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 128/400 164/164 [==============================] - 0s 79us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 129/400 164/164 [==============================] - 0s 79us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 130/400 164/164 [==============================] - 0s 134us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 131/400 164/164 [==============================] - 0s 98us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 132/400 164/164 [==============================] - 0s 85us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7425 - val_accuracy: 0.6545 Epoch 133/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 134/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 135/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 136/400 164/164 [==============================] - 0s 104us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00136: ReduceLROnPlateau reducing learning rate to 7.324218813664629e-07. 
Epoch 137/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 138/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 139/400 164/164 [==============================] - 0s 73us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 140/400 164/164 [==============================] - 0s 116us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 141/400 164/164 [==============================] - 0s 91us/step - loss: 0.3289 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 142/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 143/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 144/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 145/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 146/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00146: ReduceLROnPlateau reducing learning rate to 3.6621094068323146e-07. 
Epoch 147/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 148/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 149/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 150/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 151/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 152/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 153/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 154/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 155/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 156/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00156: ReduceLROnPlateau reducing learning rate to 1.8310547034161573e-07. 
Epoch 157/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 158/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 159/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 160/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 161/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 162/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 163/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 164/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 165/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 166/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00166: ReduceLROnPlateau reducing learning rate to 9.155273517080786e-08. 
Epoch 167/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 168/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 169/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 170/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 171/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 172/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 173/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 174/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 175/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 176/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00176: ReduceLROnPlateau reducing learning rate to 4.577636758540393e-08. 
Epoch 177/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 178/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 179/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 180/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 181/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 182/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 183/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 184/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 185/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 186/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00186: ReduceLROnPlateau reducing learning rate to 2.2888183792701966e-08. 
Epoch 187/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 188/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 189/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 190/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 191/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 192/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 193/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 194/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 195/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 196/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00196: ReduceLROnPlateau reducing learning rate to 1.1444091896350983e-08. 
Epoch 197/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 198/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 199/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 200/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 201/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 202/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 203/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 204/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 205/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 206/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00206: ReduceLROnPlateau reducing learning rate to 5.7220459481754915e-09. 
Epoch 207/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 208/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 209/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 210/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 211/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 212/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 213/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 214/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 215/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 216/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00216: ReduceLROnPlateau reducing learning rate to 2.8610229740877458e-09. 
Epoch 217/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 218/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 219/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 220/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 221/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 222/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 223/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 224/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 225/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 226/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00226: ReduceLROnPlateau reducing learning rate to 1.4305114870438729e-09. 
Epoch 227/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 228/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 229/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 230/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 231/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 232/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 233/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 234/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 235/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 236/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00236: ReduceLROnPlateau reducing learning rate to 7.152557435219364e-10. 
Epoch 237/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 238/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 239/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 240/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 241/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 242/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 243/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 244/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 245/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 246/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00246: ReduceLROnPlateau reducing learning rate to 3.576278717609682e-10. 
Epoch 247/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 248/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 249/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 250/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 251/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 252/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 253/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 254/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 255/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 256/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00256: ReduceLROnPlateau reducing learning rate to 1.788139358804841e-10. 
Epoch 257/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 258/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 259/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 260/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 261/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 262/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 263/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 264/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 265/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 266/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00266: ReduceLROnPlateau reducing learning rate to 8.940696794024205e-11. 
Epoch 267/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 268/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 269/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 270/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 271/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 272/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 273/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 274/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 275/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 276/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00276: ReduceLROnPlateau reducing learning rate to 4.470348397012103e-11. 
Epoch 277/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 278/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 279/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 280/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 281/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 282/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 283/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 284/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 285/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 286/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00286: ReduceLROnPlateau reducing learning rate to 2.2351741985060514e-11. 
Epoch 287/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 288/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 289/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 290/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 291/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 292/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 293/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 294/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 295/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 296/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00296: ReduceLROnPlateau reducing learning rate to 1.1175870992530257e-11. 
Epoch 297/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 298/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 299/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 300/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 301/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 302/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 303/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 304/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 305/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 306/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00306: ReduceLROnPlateau reducing learning rate to 5.5879354962651284e-12. 
Epoch 307/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 308/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 309/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 310/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 311/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 312/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 313/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 314/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 315/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 316/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00316: ReduceLROnPlateau reducing learning rate to 2.7939677481325642e-12. 
Epoch 317/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 318/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 319/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 320/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 321/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 322/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 323/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 324/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 325/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 326/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00326: ReduceLROnPlateau reducing learning rate to 1.3969838740662821e-12. 
Epoch 327/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 328/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 329/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 330/400 164/164 [==============================] - 0s 67us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 331/400 164/164 [==============================] - ETA: 0s - loss: 0.3229 - accuracy: 0.90 - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 332/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 333/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 334/400 164/164 [==============================] - 0s 91us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 335/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 336/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00336: ReduceLROnPlateau reducing learning rate to 6.984919370331411e-13. 
Epoch 337/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 338/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 339/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 340/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 341/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 342/400 164/164 [==============================] - 0s 85us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 343/400 164/164 [==============================] - 0s 79us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 344/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 345/400 164/164 [==============================] - 0s 73us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 346/400 164/164 [==============================] - 0s 97us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00346: ReduceLROnPlateau reducing learning rate to 3.4924596851657053e-13. 
Epoch 347/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 348/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 349/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 350/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 351/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 352/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 353/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 354/400 164/164 [==============================] - 0s 158us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 355/400 164/164 [==============================] - 0s 140us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 356/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00356: ReduceLROnPlateau reducing learning rate to 1.7462298425828526e-13. 
Epoch 357/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 358/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 359/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 360/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 361/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 362/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 363/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 364/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 365/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 366/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00366: ReduceLROnPlateau reducing learning rate to 8.731149212914263e-14. 
Epoch 367/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 368/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 369/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 370/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 371/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 372/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 373/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 374/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 375/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 376/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00376: ReduceLROnPlateau reducing learning rate to 4.3655746064571316e-14. 
Epoch 377/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 378/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 379/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 380/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 381/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 382/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 383/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 384/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 385/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 386/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00386: ReduceLROnPlateau reducing learning rate to 2.1827873032285658e-14. 
Epoch 387/400 164/164 [==============================] - 0s 134us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 388/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 389/400 164/164 [==============================] - 0s 110us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 390/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 391/400 164/164 [==============================] - 0s 128us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 392/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 393/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 394/400 164/164 [==============================] - 0s 140us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 395/400 164/164 [==============================] - 0s 122us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 396/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 00396: ReduceLROnPlateau reducing learning rate to 1.0913936516142829e-14. 
Epoch 397/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 398/400 164/164 [==============================] - 0s 116us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 399/400 164/164 [==============================] - 0s 98us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545 Epoch 400/400 164/164 [==============================] - 0s 104us/step - loss: 0.3288 - accuracy: 0.8963 - val_loss: 0.7426 - val_accuracy: 0.6545
# Pull the per-epoch curves that Keras recorded during model.fit().
hist = history.history
acc = hist['accuracy']
val_acc = hist['val_accuracy']
loss = hist['loss']
val_loss = hist['val_loss']
epochs = range(len(acc))
print(epochs)
# Two diagnostic plots, same convention in both:
# training values as blue dots ('bo'), validation as a solid blue line ('b').
curves = [
    ('Training acc', 'Validation acc', 'Training and validation accuracy', acc, val_acc),
    ('Training loss', 'Validation loss', 'Training and validation loss', loss, val_loss),
]
for train_label, val_label, title, train_series, val_series in curves:
    plt.plot(epochs, train_series, 'bo', label=train_label)
    plt.plot(epochs, val_series, 'b', label=val_label)
    plt.title(title)
    plt.legend()
    plt.show()
range(0, 400)
# Score the trained network on the held-out test split and report both metrics.
evaluation = model.evaluate(X_test, y_test)
test_loss, test_acc = evaluation
print("test loss: {}, test accuracy: {}".format(test_loss, test_acc))
55/55 [==============================] - 0s 55us/step test loss: 0.7425676215778697, test accuracy: 0.6545454263687134
# Predicted class-1 probabilities for the test set; ROC AUC is computed on
# the raw scores, so no thresholding is applied here.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.6399456521739131
# Binarize the predicted probabilities at the conventional 0.5 cutoff —
# Cohen's kappa needs hard class labels, not scores.
y_pred = [int(p >= 0.5) for p in y_pred]
kappa = cohen_kappa_score(y_test, y_pred)
print("Kappa: ", kappa)
Kappa: 0.2943956785955435
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.992062 | -0.477172 | -1.079451 | -2.369470 | -1.705431 | -0.098594 | -0.281836 | -1.432001 | -0.898623 | 0.130446 | -0.024683 | -0.312128 | 0.020392 |
| 1 | 0.843575 | -0.507672 | -0.731713 | -0.334904 | 1.442336 | -0.491141 | -0.266416 | -0.511246 | 1.004414 | 0.558777 | 0.127114 | -1.667555 | 0.835458 |
| 2 | 0.816922 | -0.263544 | 0.639646 | -0.865417 | 1.276602 | -0.245238 | 0.106722 | -0.761365 | -0.170481 | -1.443667 | -0.451102 | 1.196430 | -0.037846 |
| 3 | 4.368525 | 0.851784 | -0.671158 | -0.128467 | 2.141169 | -0.472725 | -1.437233 | -1.858760 | 1.581800 | -0.145852 | 0.107228 | 1.458238 | 1.666081 |
| 4 | 0.001312 | 0.535305 | -0.648296 | 0.221414 | 0.549478 | 0.736878 | -0.439538 | -0.138787 | 0.584258 | 0.095671 | 1.901833 | 2.909252 | 1.802578 |
| 5 | -0.236754 | 0.488978 | 0.203743 | 0.088401 | -0.151814 | 0.811707 | -0.092973 | 0.153518 | -0.936863 | 0.354100 | 0.123352 | 1.318569 | 1.097711 |
| 6 | -0.842496 | 0.742173 | 0.068601 | 1.394492 | -0.276167 | 1.301853 | 0.336343 | 1.077540 | -1.118983 | 1.688235 | -0.103661 | 1.224883 | 0.350956 |
| 7 | -0.952702 | 1.078642 | -0.563379 | -0.018149 | -0.073042 | -0.591301 | -1.392389 | 0.209234 | 0.725065 | 0.064350 | 0.034449 | 0.581953 | 2.151966 |
| 8 | 0.046457 | -0.093025 | -0.804385 | 0.542662 | -0.130939 | 0.042792 | 1.198959 | -0.559116 | 0.017192 | -0.249308 | 0.747851 | -0.035599 | 0.995166 |
| 9 | -0.781158 | 0.099463 | 0.196737 | 2.462131 | 0.316140 | -0.369698 | 2.196715 | -0.800443 | 2.137687 | 1.438443 | 0.055279 | -0.284437 | 1.702942 |
| 10 | -0.906167 | 0.568017 | 0.700382 | 2.876646 | -0.809125 | -0.491839 | 1.801564 | -2.406947 | 1.939246 | 1.397556 | 0.709408 | -0.423394 | 1.773713 |
| 11 | 1.172687 | 1.292213 | -0.402038 | 0.087342 | 0.324539 | 0.973336 | -0.548282 | 0.781195 | 0.846038 | 0.464514 | -1.030463 | -0.559243 | 0.168727 |
| 12 | 0.367875 | 1.949889 | 0.516382 | 0.657124 | -0.534306 | 0.575187 | -0.750861 | 0.247200 | -0.232297 | 0.332174 | -0.426787 | 0.318763 | 0.083316 |
| 13 | 1.270520 | 1.194102 | 0.267933 | 0.676186 | 0.394734 | -0.709975 | -0.047626 | 1.113385 | 0.339962 | 0.424937 | -0.528480 | 0.671225 | 0.078062 |
| 14 | -0.095931 | 0.792392 | 0.626113 | 0.189989 | 0.315198 | -0.175744 | 0.011713 | -0.072196 | 0.742338 | 0.974567 | 0.935685 | 0.083454 | 0.970157 |
| 15 | -0.322645 | 0.977766 | 0.685697 | 0.670670 | 0.997903 | 0.619018 | 0.498110 | -0.016728 | 0.445370 | -0.102204 | 0.199517 | -0.315303 | 0.347920 |
| 16 | 0.565974 | 0.440551 | 0.402995 | 1.815814 | 1.906139 | 1.105013 | 1.256180 | 0.907086 | 0.592851 | -0.159427 | 1.013051 | -0.620202 | 1.259932 |
| 17 | -0.863540 | 0.887127 | 1.387720 | -0.082168 | -0.694633 | -0.810037 | 1.251697 | -0.443532 | 0.307506 | 0.253798 | -0.292483 | 0.030812 | 0.176350 |
| 18 | -0.822258 | -0.630193 | -0.672294 | -0.279417 | -0.731983 | -1.510167 | -1.393705 | -0.161872 | 0.722297 | 0.910604 | -0.610303 | 0.380547 | 1.296315 |
| 19 | -0.889164 | 0.641922 | 2.278761 | 0.190213 | -0.341231 | -0.624107 | 1.228820 | -0.549441 | -0.662942 | 0.481866 | -0.541347 | -1.061735 | -0.122227 |
| 20 | 0.795964 | 0.484784 | 0.898919 | 0.027625 | 0.415359 | 0.271286 | 0.366966 | -0.498975 | 0.300352 | 0.216702 | 0.361195 | -0.771976 | 0.085971 |
| 21 | 0.168183 | -0.077353 | 1.019887 | -0.637065 | 0.731534 | 0.877245 | 1.225125 | -0.566997 | -0.452222 | -1.105384 | 0.185636 | -0.782808 | -0.224975 |
| 22 | 0.510023 | -0.099060 | 0.064384 | -0.039933 | 0.786951 | 0.119530 | -0.259052 | -0.881354 | -0.113425 | 1.191274 | 0.335443 | -0.189618 | -0.337688 |
| 23 | 0.216210 | -0.069447 | 0.974822 | -0.626273 | 0.835854 | 0.914236 | 1.226463 | -0.369525 | -0.398299 | -1.146613 | 0.026274 | -0.944475 | -0.192948 |
| 24 | -0.239273 | -0.518568 | -0.127834 | 0.045011 | 0.403223 | 0.368253 | -0.584902 | -0.905436 | -0.405699 | 0.129383 | 0.809611 | -0.174138 | -0.115393 |
| 25 | -1.241907 | 1.355534 | -0.693470 | 0.793789 | 0.606007 | 0.930263 | 0.009323 | -0.712463 | 0.037916 | -0.182143 | 1.212760 | -0.083882 | 0.639662 |
| 26 | -0.847436 | 1.180146 | -0.489592 | 1.189572 | -0.457645 | -0.163979 | -0.010812 | -0.765561 | -0.347488 | -0.216575 | 0.804302 | -0.236378 | 0.481212 |
| 27 | -0.378383 | 1.017722 | -1.812001 | 0.443514 | 0.583209 | 1.709730 | 0.715521 | -0.076610 | 0.416120 | 0.013436 | 0.420025 | -0.925263 | 0.626400 |
| 28 | 0.245370 | 1.187084 | 1.056929 | 2.013063 | -0.505622 | 1.228583 | -1.158143 | 0.622932 | 0.113512 | 0.948397 | 0.008252 | 1.035839 | -0.691702 |
| 29 | -0.623386 | 1.368898 | 1.216933 | 1.961377 | 0.744541 | 1.555516 | -1.205283 | -0.252995 | -0.325624 | 0.538668 | 0.197646 | 0.356450 | -0.219812 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 189 | -0.565077 | 0.809784 | 0.557457 | 0.815038 | 0.823053 | -0.931359 | -0.039244 | -0.199068 | 0.083690 | -0.235063 | -0.030800 | -0.564557 | -0.253507 |
| 190 | -0.602848 | 0.638838 | 0.763481 | -0.424641 | -0.810302 | -0.951734 | -0.732024 | -0.504038 | 0.379372 | 0.748895 | -0.593820 | -0.772491 | 0.175752 |
| 191 | -1.094031 | -0.896961 | 0.400325 | -1.635971 | -1.099938 | -1.091799 | -0.593281 | 0.890889 | 0.984647 | 0.584509 | 0.318496 | 0.175062 | -0.783524 |
| 192 | -0.348357 | 0.944340 | 0.239675 | 0.003612 | -1.370450 | -0.996597 | -0.616405 | 0.161481 | -0.258760 | 0.534721 | -0.431338 | 0.376456 | -1.623026 |
| 193 | 2.110671 | -1.005236 | 0.268022 | 0.459390 | -1.985350 | 0.405677 | -0.361571 | -1.272053 | -0.873345 | 2.111218 | -0.246708 | 0.798456 | 1.067252 |
| 194 | 1.222194 | -1.600122 | -1.149302 | 0.230839 | -0.213026 | -1.572114 | 0.486447 | -0.770701 | 0.244895 | 2.689114 | -2.296486 | 0.718338 | -1.220356 |
| 195 | -0.509789 | -0.757711 | 0.189267 | 0.516644 | 0.750906 | -1.485714 | 2.485824 | -1.204754 | -3.373113 | -0.450016 | -1.091178 | -0.474728 | -0.522197 |
| 196 | 0.194175 | -0.618441 | -1.090420 | 0.233017 | -1.492602 | -0.342192 | -1.612833 | 0.714990 | 0.072755 | -0.026932 | 0.464029 | 0.212333 | 1.204262 |
| 197 | 0.297635 | -0.727616 | -1.927078 | -0.145347 | -0.990256 | 0.052935 | -1.791108 | -0.351333 | -0.064903 | 0.201842 | 1.581215 | 1.084453 | -0.168841 |
| 198 | -0.271030 | -0.575137 | -1.005334 | -0.238705 | -0.931830 | -1.319114 | -0.668613 | 0.510822 | 0.209623 | 0.487577 | 0.154874 | 0.133768 | 1.259548 |
| 199 | 0.059096 | -0.370313 | -0.760047 | 0.706270 | -2.488266 | -1.336692 | -0.683584 | 0.436366 | -0.150281 | -0.711308 | -0.851205 | 0.253942 | -0.052516 |
| 200 | 0.147539 | -0.233608 | -0.578016 | 0.870637 | -2.418094 | -1.286070 | -0.692623 | 0.342693 | 0.015890 | -0.795418 | -1.221248 | 0.309493 | -0.526480 |
| 201 | -0.076214 | -1.055629 | 0.159389 | -0.403318 | -0.111273 | -1.325990 | -0.867502 | 0.519381 | 0.192007 | -0.024629 | 0.220420 | 0.551046 | 0.399728 |
| 202 | 1.468986 | 0.518464 | 1.475456 | -1.400891 | 0.408186 | -1.831201 | 1.474742 | 0.566660 | -0.403197 | -1.295176 | -0.443787 | -1.884346 | -1.993491 |
| 203 | -1.739107 | 0.192104 | -0.670709 | -1.236237 | -1.672915 | -0.680127 | 0.027148 | 0.524909 | 1.865754 | -0.634310 | -0.607429 | -1.471191 | -0.632982 |
| 204 | -0.663868 | -0.862566 | -0.329803 | -0.857680 | 0.167824 | -0.013328 | 0.176565 | 0.125832 | 0.609671 | -1.296827 | -0.435986 | -1.341223 | -0.977207 |
| 205 | -0.739818 | -0.668220 | -0.077479 | 0.026286 | 0.027801 | 0.040659 | -0.161646 | -1.046948 | -1.248976 | -0.449243 | 1.046834 | 1.381194 | 1.646325 |
| 206 | 0.475752 | 0.695473 | -0.072097 | 1.081397 | -0.366985 | -2.008080 | 0.515734 | 0.005330 | 1.193800 | -0.841825 | -2.650200 | -3.862624 | -2.115507 |
| 207 | -1.331365 | -1.632552 | -0.876636 | 0.076190 | 1.187799 | 1.138590 | 1.235955 | 1.583447 | 0.890342 | -1.587964 | 0.546109 | 1.565567 | 1.756993 |
| 208 | -0.397476 | 0.090963 | 1.217996 | 0.773741 | 1.107204 | -1.125870 | -0.915396 | -1.130561 | -1.914456 | -0.664474 | -0.226576 | 0.112420 | 0.235011 |
| 209 | -0.465823 | -1.372705 | -0.445436 | 0.316510 | -1.492946 | -1.103783 | 0.353513 | -0.311377 | -1.095388 | -0.615078 | -0.585868 | 0.172807 | -0.860564 |
| 210 | -0.594535 | -1.761364 | -1.069906 | -0.502969 | -1.411276 | -0.906350 | -0.559102 | -1.240920 | -2.254196 | -1.206339 | -0.528047 | 0.924112 | 0.472298 |
| 211 | -1.022693 | 0.373374 | -0.104205 | -0.815628 | -0.574733 | 0.906934 | 0.765114 | -0.015386 | 0.110695 | 1.832325 | 0.712557 | -0.951976 | -0.678869 |
| 212 | -0.967902 | 0.155275 | 0.013938 | -0.549105 | -0.907792 | 0.881907 | 0.609589 | -0.135010 | -0.373473 | 1.152134 | 0.386511 | -0.744687 | -0.447017 |
| 213 | -1.238242 | -0.062983 | -0.133082 | -0.158458 | -0.338086 | -0.411874 | 0.964537 | 0.870379 | 0.530337 | 0.858339 | 0.489332 | -1.190977 | -1.340484 |
| 214 | 0.349761 | -1.391267 | -3.069473 | 0.840195 | 1.044391 | -1.052018 | 1.004856 | 1.478511 | 1.210060 | -1.145325 | 2.653757 | 1.937234 | 0.592139 |
| 215 | 0.782819 | -1.300386 | -0.487318 | 0.850960 | -2.046427 | 1.050631 | 0.289069 | 2.400271 | 2.707288 | -0.278238 | 0.152360 | 1.912210 | -0.208225 |
| 216 | 1.847553 | -1.059174 | -0.808403 | 0.400706 | -0.275009 | 0.409744 | -0.141885 | 0.706348 | 0.476002 | 0.990111 | -0.168504 | 0.856440 | -0.395652 |
| 217 | 2.608478 | 0.174234 | 2.534211 | -0.985597 | -0.436400 | 3.751943 | 1.560179 | -2.367095 | 1.272529 | 2.464209 | -0.954336 | 0.310720 | -1.209456 |
| 218 | -0.069569 | 0.418008 | -0.004324 | 1.330358 | 0.365352 | -0.582788 | -0.527444 | -0.298114 | -0.353021 | -1.118883 | -0.459230 | -0.986241 | -0.041010 |
219 rows × 13 columns
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14 clusters.
WSSs = [KMeans(n_clusters=k, random_state=0).fit(X).inertia_ for k in range(1, 15)]
WSSs
[2847.0, 2572.5760570812117, 2370.209947155015, 2235.6129406180157, 2112.951551625758, 2041.1809211260454, 1982.3615393500422, 1899.0667595696164, 1851.9267246215204, 1760.4468946465518, 1745.79714786859, 1689.1350809615656, 1657.4940102564742, 1625.370413913055]
# Plot the elbow curve (inertia vs. number of clusters) to pick K visually.
elbow_fig = plt.figure(figsize=(12, 12))
plt.plot(list(range(1, 15)), WSSs)
[<matplotlib.lines.Line2D at 0x1e82d2ffda0>]
# Number of clusters chosen from the elbow plot above.
K = 3
# Fix: K was defined but the original hard-coded n_clusters=3 — use the
# constant so changing K in one place updates the model.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=3, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([1, 2, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 2,
1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0,
0, 0, 1, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 0, 0, 1, 1,
1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 2,
0, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2,
2, 0, 1, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
0, 0, 1, 0, 2, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2,
1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 2, 0, 2,
2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1,
1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 0, 1, 0, 1])
# fit(X) already stored the training assignments in labels_; reusing them
# avoids a redundant second pass over X. The notebook outputs confirm
# predict(X) and labels_ are identical here.
clusters_mfcc = kmeans_mfcc.labels_
clusters_mfcc
array([1, 2, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 2,
1, 2, 1, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 1, 1, 2, 2, 2, 1, 1, 1, 0,
0, 0, 1, 1, 2, 0, 2, 2, 0, 0, 0, 1, 1, 0, 1, 1, 1, 2, 0, 0, 1, 1,
1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 2, 2, 1, 1, 1, 1, 1, 0, 0, 0, 2,
0, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 0, 1, 0, 2, 2, 2, 2, 1, 2, 2, 2,
2, 0, 1, 2, 0, 0, 2, 2, 2, 2, 1, 1, 0, 0, 0, 2, 2, 2, 2, 2, 0, 0,
0, 0, 1, 0, 2, 1, 1, 1, 2, 0, 1, 0, 0, 1, 1, 1, 2, 0, 1, 2, 2, 2,
1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 0, 1, 0, 0, 2, 0, 2, 2, 2, 0, 2,
2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 2, 0, 1, 0, 1, 2, 1, 1, 1, 2, 1, 1,
1, 1, 1, 1, 2, 2, 2, 1, 2, 0, 1, 1, 1, 2, 2, 2, 0, 0, 1, 0, 1])
# Attach the K-Means assignment and the original target to the feature frame
# so the two can be cross-tabulated below.
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.992062 | -0.477172 | -1.079451 | -2.369470 | -1.705431 | -0.098594 | -0.281836 | -1.432001 | -0.898623 | 0.130446 | -0.024683 | -0.312128 | 0.020392 | 1 | 0 |
| 1 | 0.843575 | -0.507672 | -0.731713 | -0.334904 | 1.442336 | -0.491141 | -0.266416 | -0.511246 | 1.004414 | 0.558777 | 0.127114 | -1.667555 | 0.835458 | 2 | 0 |
| 2 | 0.816922 | -0.263544 | 0.639646 | -0.865417 | 1.276602 | -0.245238 | 0.106722 | -0.761365 | -0.170481 | -1.443667 | -0.451102 | 1.196430 | -0.037846 | 1 | 0 |
| 3 | 4.368525 | 0.851784 | -0.671158 | -0.128467 | 2.141169 | -0.472725 | -1.437233 | -1.858760 | 1.581800 | -0.145852 | 0.107228 | 1.458238 | 1.666081 | 1 | 0 |
| 4 | 0.001312 | 0.535305 | -0.648296 | 0.221414 | 0.549478 | 0.736878 | -0.439538 | -0.138787 | 0.584258 | 0.095671 | 1.901833 | 2.909252 | 1.802578 | 0 | 0 |
| 5 | -0.236754 | 0.488978 | 0.203743 | 0.088401 | -0.151814 | 0.811707 | -0.092973 | 0.153518 | -0.936863 | 0.354100 | 0.123352 | 1.318569 | 1.097711 | 0 | 0 |
| 6 | -0.842496 | 0.742173 | 0.068601 | 1.394492 | -0.276167 | 1.301853 | 0.336343 | 1.077540 | -1.118983 | 1.688235 | -0.103661 | 1.224883 | 0.350956 | 0 | 0 |
| 7 | -0.952702 | 1.078642 | -0.563379 | -0.018149 | -0.073042 | -0.591301 | -1.392389 | 0.209234 | 0.725065 | 0.064350 | 0.034449 | 0.581953 | 2.151966 | 1 | 0 |
| 8 | 0.046457 | -0.093025 | -0.804385 | 0.542662 | -0.130939 | 0.042792 | 1.198959 | -0.559116 | 0.017192 | -0.249308 | 0.747851 | -0.035599 | 0.995166 | 0 | 0 |
| 9 | -0.781158 | 0.099463 | 0.196737 | 2.462131 | 0.316140 | -0.369698 | 2.196715 | -0.800443 | 2.137687 | 1.438443 | 0.055279 | -0.284437 | 1.702942 | 0 | 0 |
| 10 | -0.906167 | 0.568017 | 0.700382 | 2.876646 | -0.809125 | -0.491839 | 1.801564 | -2.406947 | 1.939246 | 1.397556 | 0.709408 | -0.423394 | 1.773713 | 0 | 0 |
| 11 | 1.172687 | 1.292213 | -0.402038 | 0.087342 | 0.324539 | 0.973336 | -0.548282 | 0.781195 | 0.846038 | 0.464514 | -1.030463 | -0.559243 | 0.168727 | 0 | 0 |
| 12 | 0.367875 | 1.949889 | 0.516382 | 0.657124 | -0.534306 | 0.575187 | -0.750861 | 0.247200 | -0.232297 | 0.332174 | -0.426787 | 0.318763 | 0.083316 | 0 | 0 |
| 13 | 1.270520 | 1.194102 | 0.267933 | 0.676186 | 0.394734 | -0.709975 | -0.047626 | 1.113385 | 0.339962 | 0.424937 | -0.528480 | 0.671225 | 0.078062 | 0 | 0 |
| 14 | -0.095931 | 0.792392 | 0.626113 | 0.189989 | 0.315198 | -0.175744 | 0.011713 | -0.072196 | 0.742338 | 0.974567 | 0.935685 | 0.083454 | 0.970157 | 0 | 0 |
| 15 | -0.322645 | 0.977766 | 0.685697 | 0.670670 | 0.997903 | 0.619018 | 0.498110 | -0.016728 | 0.445370 | -0.102204 | 0.199517 | -0.315303 | 0.347920 | 0 | 0 |
| 16 | 0.565974 | 0.440551 | 0.402995 | 1.815814 | 1.906139 | 1.105013 | 1.256180 | 0.907086 | 0.592851 | -0.159427 | 1.013051 | -0.620202 | 1.259932 | 0 | 0 |
| 17 | -0.863540 | 0.887127 | 1.387720 | -0.082168 | -0.694633 | -0.810037 | 1.251697 | -0.443532 | 0.307506 | 0.253798 | -0.292483 | 0.030812 | 0.176350 | 0 | 0 |
| 18 | -0.822258 | -0.630193 | -0.672294 | -0.279417 | -0.731983 | -1.510167 | -1.393705 | -0.161872 | 0.722297 | 0.910604 | -0.610303 | 0.380547 | 1.296315 | 1 | 0 |
| 19 | -0.889164 | 0.641922 | 2.278761 | 0.190213 | -0.341231 | -0.624107 | 1.228820 | -0.549441 | -0.662942 | 0.481866 | -0.541347 | -1.061735 | -0.122227 | 2 | 0 |
| 20 | 0.795964 | 0.484784 | 0.898919 | 0.027625 | 0.415359 | 0.271286 | 0.366966 | -0.498975 | 0.300352 | 0.216702 | 0.361195 | -0.771976 | 0.085971 | 0 | 0 |
| 21 | 0.168183 | -0.077353 | 1.019887 | -0.637065 | 0.731534 | 0.877245 | 1.225125 | -0.566997 | -0.452222 | -1.105384 | 0.185636 | -0.782808 | -0.224975 | 2 | 0 |
| 22 | 0.510023 | -0.099060 | 0.064384 | -0.039933 | 0.786951 | 0.119530 | -0.259052 | -0.881354 | -0.113425 | 1.191274 | 0.335443 | -0.189618 | -0.337688 | 1 | 0 |
| 23 | 0.216210 | -0.069447 | 0.974822 | -0.626273 | 0.835854 | 0.914236 | 1.226463 | -0.369525 | -0.398299 | -1.146613 | 0.026274 | -0.944475 | -0.192948 | 2 | 0 |
| 24 | -0.239273 | -0.518568 | -0.127834 | 0.045011 | 0.403223 | 0.368253 | -0.584902 | -0.905436 | -0.405699 | 0.129383 | 0.809611 | -0.174138 | -0.115393 | 1 | 0 |
| 25 | -1.241907 | 1.355534 | -0.693470 | 0.793789 | 0.606007 | 0.930263 | 0.009323 | -0.712463 | 0.037916 | -0.182143 | 1.212760 | -0.083882 | 0.639662 | 0 | 0 |
| 26 | -0.847436 | 1.180146 | -0.489592 | 1.189572 | -0.457645 | -0.163979 | -0.010812 | -0.765561 | -0.347488 | -0.216575 | 0.804302 | -0.236378 | 0.481212 | 0 | 0 |
| 27 | -0.378383 | 1.017722 | -1.812001 | 0.443514 | 0.583209 | 1.709730 | 0.715521 | -0.076610 | 0.416120 | 0.013436 | 0.420025 | -0.925263 | 0.626400 | 0 | 0 |
| 28 | 0.245370 | 1.187084 | 1.056929 | 2.013063 | -0.505622 | 1.228583 | -1.158143 | 0.622932 | 0.113512 | 0.948397 | 0.008252 | 1.035839 | -0.691702 | 0 | 0 |
| 29 | -0.623386 | 1.368898 | 1.216933 | 1.961377 | 0.744541 | 1.555516 | -1.205283 | -0.252995 | -0.325624 | 0.538668 | 0.197646 | 0.356450 | -0.219812 | 0 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 189 | -0.565077 | 0.809784 | 0.557457 | 0.815038 | 0.823053 | -0.931359 | -0.039244 | -0.199068 | 0.083690 | -0.235063 | -0.030800 | -0.564557 | -0.253507 | 0 | 1 |
| 190 | -0.602848 | 0.638838 | 0.763481 | -0.424641 | -0.810302 | -0.951734 | -0.732024 | -0.504038 | 0.379372 | 0.748895 | -0.593820 | -0.772491 | 0.175752 | 1 | 1 |
| 191 | -1.094031 | -0.896961 | 0.400325 | -1.635971 | -1.099938 | -1.091799 | -0.593281 | 0.890889 | 0.984647 | 0.584509 | 0.318496 | 0.175062 | -0.783524 | 2 | 1 |
| 192 | -0.348357 | 0.944340 | 0.239675 | 0.003612 | -1.370450 | -0.996597 | -0.616405 | 0.161481 | -0.258760 | 0.534721 | -0.431338 | 0.376456 | -1.623026 | 1 | 1 |
| 193 | 2.110671 | -1.005236 | 0.268022 | 0.459390 | -1.985350 | 0.405677 | -0.361571 | -1.272053 | -0.873345 | 2.111218 | -0.246708 | 0.798456 | 1.067252 | 1 | 1 |
| 194 | 1.222194 | -1.600122 | -1.149302 | 0.230839 | -0.213026 | -1.572114 | 0.486447 | -0.770701 | 0.244895 | 2.689114 | -2.296486 | 0.718338 | -1.220356 | 1 | 1 |
| 195 | -0.509789 | -0.757711 | 0.189267 | 0.516644 | 0.750906 | -1.485714 | 2.485824 | -1.204754 | -3.373113 | -0.450016 | -1.091178 | -0.474728 | -0.522197 | 2 | 1 |
| 196 | 0.194175 | -0.618441 | -1.090420 | 0.233017 | -1.492602 | -0.342192 | -1.612833 | 0.714990 | 0.072755 | -0.026932 | 0.464029 | 0.212333 | 1.204262 | 1 | 1 |
| 197 | 0.297635 | -0.727616 | -1.927078 | -0.145347 | -0.990256 | 0.052935 | -1.791108 | -0.351333 | -0.064903 | 0.201842 | 1.581215 | 1.084453 | -0.168841 | 1 | 1 |
| 198 | -0.271030 | -0.575137 | -1.005334 | -0.238705 | -0.931830 | -1.319114 | -0.668613 | 0.510822 | 0.209623 | 0.487577 | 0.154874 | 0.133768 | 1.259548 | 1 | 1 |
| 199 | 0.059096 | -0.370313 | -0.760047 | 0.706270 | -2.488266 | -1.336692 | -0.683584 | 0.436366 | -0.150281 | -0.711308 | -0.851205 | 0.253942 | -0.052516 | 1 | 1 |
| 200 | 0.147539 | -0.233608 | -0.578016 | 0.870637 | -2.418094 | -1.286070 | -0.692623 | 0.342693 | 0.015890 | -0.795418 | -1.221248 | 0.309493 | -0.526480 | 1 | 1 |
| 201 | -0.076214 | -1.055629 | 0.159389 | -0.403318 | -0.111273 | -1.325990 | -0.867502 | 0.519381 | 0.192007 | -0.024629 | 0.220420 | 0.551046 | 0.399728 | 1 | 1 |
| 202 | 1.468986 | 0.518464 | 1.475456 | -1.400891 | 0.408186 | -1.831201 | 1.474742 | 0.566660 | -0.403197 | -1.295176 | -0.443787 | -1.884346 | -1.993491 | 2 | 1 |
| 203 | -1.739107 | 0.192104 | -0.670709 | -1.236237 | -1.672915 | -0.680127 | 0.027148 | 0.524909 | 1.865754 | -0.634310 | -0.607429 | -1.471191 | -0.632982 | 2 | 1 |
| 204 | -0.663868 | -0.862566 | -0.329803 | -0.857680 | 0.167824 | -0.013328 | 0.176565 | 0.125832 | 0.609671 | -1.296827 | -0.435986 | -1.341223 | -0.977207 | 2 | 1 |
| 205 | -0.739818 | -0.668220 | -0.077479 | 0.026286 | 0.027801 | 0.040659 | -0.161646 | -1.046948 | -1.248976 | -0.449243 | 1.046834 | 1.381194 | 1.646325 | 1 | 1 |
| 206 | 0.475752 | 0.695473 | -0.072097 | 1.081397 | -0.366985 | -2.008080 | 0.515734 | 0.005330 | 1.193800 | -0.841825 | -2.650200 | -3.862624 | -2.115507 | 2 | 1 |
| 207 | -1.331365 | -1.632552 | -0.876636 | 0.076190 | 1.187799 | 1.138590 | 1.235955 | 1.583447 | 0.890342 | -1.587964 | 0.546109 | 1.565567 | 1.756993 | 0 | 1 |
| 208 | -0.397476 | 0.090963 | 1.217996 | 0.773741 | 1.107204 | -1.125870 | -0.915396 | -1.130561 | -1.914456 | -0.664474 | -0.226576 | 0.112420 | 0.235011 | 1 | 1 |
| 209 | -0.465823 | -1.372705 | -0.445436 | 0.316510 | -1.492946 | -1.103783 | 0.353513 | -0.311377 | -1.095388 | -0.615078 | -0.585868 | 0.172807 | -0.860564 | 1 | 1 |
| 210 | -0.594535 | -1.761364 | -1.069906 | -0.502969 | -1.411276 | -0.906350 | -0.559102 | -1.240920 | -2.254196 | -1.206339 | -0.528047 | 0.924112 | 0.472298 | 1 | 1 |
| 211 | -1.022693 | 0.373374 | -0.104205 | -0.815628 | -0.574733 | 0.906934 | 0.765114 | -0.015386 | 0.110695 | 1.832325 | 0.712557 | -0.951976 | -0.678869 | 2 | 1 |
| 212 | -0.967902 | 0.155275 | 0.013938 | -0.549105 | -0.907792 | 0.881907 | 0.609589 | -0.135010 | -0.373473 | 1.152134 | 0.386511 | -0.744687 | -0.447017 | 2 | 1 |
| 213 | -1.238242 | -0.062983 | -0.133082 | -0.158458 | -0.338086 | -0.411874 | 0.964537 | 0.870379 | 0.530337 | 0.858339 | 0.489332 | -1.190977 | -1.340484 | 2 | 1 |
| 214 | 0.349761 | -1.391267 | -3.069473 | 0.840195 | 1.044391 | -1.052018 | 1.004856 | 1.478511 | 1.210060 | -1.145325 | 2.653757 | 1.937234 | 0.592139 | 0 | 1 |
| 215 | 0.782819 | -1.300386 | -0.487318 | 0.850960 | -2.046427 | 1.050631 | 0.289069 | 2.400271 | 2.707288 | -0.278238 | 0.152360 | 1.912210 | -0.208225 | 0 | 1 |
| 216 | 1.847553 | -1.059174 | -0.808403 | 0.400706 | -0.275009 | 0.409744 | -0.141885 | 0.706348 | 0.476002 | 0.990111 | -0.168504 | 0.856440 | -0.395652 | 1 | 1 |
| 217 | 2.608478 | 0.174234 | 2.534211 | -0.985597 | -0.436400 | 3.751943 | 1.560179 | -2.367095 | 1.272529 | 2.464209 | -0.954336 | 0.310720 | -1.209456 | 0 | 1 |
| 218 | -0.069569 | 0.418008 | -0.004324 | 1.330358 | 0.365352 | -0.582788 | -0.527444 | -0.298114 | -0.353021 | -1.118883 | -0.459230 | -0.986241 | -0.041010 | 1 | 1 |
219 rows × 15 columns
# Count rows per (chosen, Cluster) pair, pivot to Cluster x chosen, and draw
# a stacked bar chart comparing the two label columns per cluster.
stacked = X.groupby(['chosen', 'Cluster']).size().reset_index()
pivot_df = stacked.pivot(index='Cluster', columns='chosen', values=0)
pivot_df[[0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82d379160>
from IPython.display import display, Markdown, Latex

# Render a markdown section header for company 5, then rebuild the feature
# matrix (dropping the helper 'Cluster' column) and hold out a test split.
header = Markdown('## ' + companies[5])
display(header)
X = df_n_ps_std_mfcc[5].drop(columns='Cluster')
y = df_n_ps[5]['chosen']
X_train, X_test, y_train, y_test = train_test_split(X, y)
X_train.shape
(168, 13)
# Base estimator and the candidate value lists for the hyper-parameter search.
mlp = MLPClassifier(hidden_layer_sizes=(30, 30, 30))

activation_vec = ['logistic', 'relu', 'tanh']
max_iter_vec = [10, 20, 50, 75, 100, 200, 300, 400, 500, 1000, 2000]
hidden_layer_sizes_vec = [
    (10,), (20,), (30,),
    (10, 10), (20, 20), (30, 30), (20, 10),
    (10, 10, 10), (20, 20, 20), (30, 30, 30), (30, 20, 10),
]
learning_rate_init_vec = [0.001, 0.002, 0.003, 0.004, 0.005, 0.006, 0.007, 0.008, 0.009, 0.01, 0.02]
batch_size_vec = [10, 20, 40, 60, 80, 100, 150]
import time

# Wall-clock the whole grid search.
start = time.time()
np.random.seed(1234)

# Hyper-parameter grid; batch_size is deliberately excluded to keep the
# search tractable (the candidate list is defined above if needed).
parametros = {
    'activation': activation_vec,
    'max_iter': max_iter_vec,
    'hidden_layer_sizes': hidden_layer_sizes_vec,
    'learning_rate_init': learning_rate_init_vec,
}
# Refit on accuracy, but also record Cohen's kappa for every candidate.
scoring = {'kappa': make_scorer(cohen_kappa_score), 'accuracy': 'accuracy'}
# NOTE(review): `iid` was deprecated in scikit-learn 0.22 and removed in
# 0.24 — drop this argument when upgrading sklearn.
grid = GridSearchCV(mlp, param_grid=parametros, cv=5, scoring=scoring,
                    refit='accuracy', n_jobs=-1, iid=True)
grid.fit(X_train, y_train)

# Report best params, accuracy and kappa (message intentionally in Spanish,
# matching the rest of the notebook's output).
print("Los parámetros del mejor modelo fueron {0}, que permiten obtener un Accuracy de {1:.2f}% y un Kappa del {2:.2f}".format(
    grid.best_params_, grid.best_score_*100, grid.cv_results_['mean_test_kappa'][grid.best_index_]*100))

# Elapsed time after the model search finished.
end = time.time()
print("Tiempo total: {0:.2f} minutos".format((end-start)/60))
# Extract the winning topology/hyper-parameters from the grid search to
# rebuild the same network in Keras below.
n0 = X_train.shape[1]  # input dimensionality

# Hidden layer widths plus a single output unit. The original copied the
# tuple element-by-element with an index loop; list(...) + [1] is equivalent.
best = grid.best_params_
ns = list(best['hidden_layer_sizes']) + [1]
lr = best['learning_rate_init']
epochs = best['max_iter']
# Build a Keras functional-API MLP with the grid-search topology:
# len(ns)-1 hidden layers of widths ns[:-1], then one sigmoid output unit.
input_tensor = Input(shape = (n0,))
hidden_outputs = [input_tensor]  # hidden_outputs[i] is the input to layer i
for i in range (len(ns)-1):
    # NOTE(review): activation is hard-coded to 'tanh' even though the grid
    # also searched 'logistic' and 'relu' — confirm this matches
    # grid.best_params_['activation'] before comparing the two models.
    hidden_outputs.append(Dense(ns[i], activation = 'tanh')(hidden_outputs[i]))
classification_output = Dense(ns[-1], activation = 'sigmoid')(hidden_outputs[-1])
model = Model([input_tensor], [classification_output])
# Snapshot the freshly-initialised weights so training can later restart
# from this exact starting point (restored via set_weights below).
weights = model.get_weights()
model.summary()
Model: "model_2" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) (None, 13) 0 _________________________________________________________________ dense_4 (Dense) (None, 20) 280 _________________________________________________________________ dense_5 (Dense) (None, 20) 420 _________________________________________________________________ dense_6 (Dense) (None, 20) 420 _________________________________________________________________ dense_7 (Dense) (None, 1) 21 ================================================================= Total params: 1,141 Trainable params: 1,141 Non-trainable params: 0 _________________________________________________________________
# Restore the saved initial weights, then train with Adam at the grid-search
# learning rate; halve the learning rate whenever validation accuracy
# plateaus for 10 epochs.
model.set_weights(weights)
adam = keras.optimizers.Adam(lr=lr)
model.compile(optimizer=adam, loss='binary_crossentropy', metrics=['accuracy'])

lr_schedule = keras.callbacks.ReduceLROnPlateau(
    monitor='val_accuracy', factor=0.5, patience=10, min_delta=0.01, verbose=1
)
history = model.fit(
    X_train, y_train,
    epochs=epochs,
    batch_size=32,
    validation_data=(X_test, y_test),
    callbacks=[lr_schedule],
)
Train on 191 samples, validate on 64 samples Epoch 1/2000 191/191 [==============================] - 2s 10ms/step - loss: 0.6523 - accuracy: 0.5864 - val_loss: 0.5471 - val_accuracy: 0.7031 Epoch 2/2000 191/191 [==============================] - 0s 89us/step - loss: 0.4800 - accuracy: 0.7696 - val_loss: 0.5294 - val_accuracy: 0.7500 Epoch 3/2000 191/191 [==============================] - 0s 68us/step - loss: 0.4421 - accuracy: 0.8010 - val_loss: 0.5305 - val_accuracy: 0.7812 Epoch 4/2000 191/191 [==============================] - 0s 63us/step - loss: 0.4243 - accuracy: 0.8168 - val_loss: 0.5390 - val_accuracy: 0.7500 Epoch 5/2000 191/191 [==============================] - 0s 73us/step - loss: 0.4080 - accuracy: 0.8168 - val_loss: 0.5368 - val_accuracy: 0.7500 Epoch 6/2000 191/191 [==============================] - 0s 58us/step - loss: 0.3924 - accuracy: 0.8168 - val_loss: 0.5567 - val_accuracy: 0.7188 Epoch 7/2000 191/191 [==============================] - 0s 89us/step - loss: 0.3782 - accuracy: 0.8168 - val_loss: 0.5325 - val_accuracy: 0.7500 Epoch 8/2000 191/191 [==============================] - 0s 63us/step - loss: 0.3562 - accuracy: 0.8429 - val_loss: 0.5605 - val_accuracy: 0.7500 Epoch 9/2000 191/191 [==============================] - 0s 63us/step - loss: 0.3462 - accuracy: 0.8586 - val_loss: 0.5968 - val_accuracy: 0.7188 Epoch 10/2000 191/191 [==============================] - 0s 89us/step - loss: 0.3171 - accuracy: 0.8639 - val_loss: 0.5665 - val_accuracy: 0.7500 Epoch 11/2000 191/191 [==============================] - 0s 68us/step - loss: 0.3107 - accuracy: 0.8639 - val_loss: 0.5456 - val_accuracy: 0.7656 Epoch 12/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2855 - accuracy: 0.8743 - val_loss: 0.6107 - val_accuracy: 0.7500 Epoch 13/2000 191/191 [==============================] - 0s 58us/step - loss: 0.2626 - accuracy: 0.8848 - val_loss: 0.6076 - val_accuracy: 0.7656 Epoch 00013: ReduceLROnPlateau reducing learning rate to 
0.004999999888241291. Epoch 14/2000 191/191 [==============================] - 0s 94us/step - loss: 0.2396 - accuracy: 0.9215 - val_loss: 0.5932 - val_accuracy: 0.7656 Epoch 15/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2269 - accuracy: 0.9162 - val_loss: 0.5980 - val_accuracy: 0.7812 Epoch 16/2000 191/191 [==============================] - 0s 63us/step - loss: 0.2133 - accuracy: 0.9372 - val_loss: 0.6289 - val_accuracy: 0.7656 Epoch 17/2000 191/191 [==============================] - 0s 68us/step - loss: 0.2023 - accuracy: 0.9372 - val_loss: 0.6384 - val_accuracy: 0.7656 Epoch 18/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1904 - accuracy: 0.9424 - val_loss: 0.6197 - val_accuracy: 0.7656 Epoch 19/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1817 - accuracy: 0.9372 - val_loss: 0.6255 - val_accuracy: 0.7656 Epoch 20/2000 191/191 [==============================] - 0s 68us/step - loss: 0.1725 - accuracy: 0.9424 - val_loss: 0.6415 - val_accuracy: 0.7500 Epoch 21/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1603 - accuracy: 0.9424 - val_loss: 0.6400 - val_accuracy: 0.7656 Epoch 22/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1473 - accuracy: 0.9581 - val_loss: 0.6420 - val_accuracy: 0.7500 Epoch 23/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1367 - accuracy: 0.9738 - val_loss: 0.6448 - val_accuracy: 0.7656 Epoch 00023: ReduceLROnPlateau reducing learning rate to 0.0024999999441206455. 
Epoch 24/2000 191/191 [==============================] - 0s 68us/step - loss: 0.1248 - accuracy: 0.9791 - val_loss: 0.6578 - val_accuracy: 0.7812 Epoch 25/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1211 - accuracy: 0.9738 - val_loss: 0.6633 - val_accuracy: 0.7812 Epoch 26/2000 191/191 [==============================] - 0s 58us/step - loss: 0.1166 - accuracy: 0.9791 - val_loss: 0.6651 - val_accuracy: 0.7656 Epoch 27/2000 191/191 [==============================] - 0s 110us/step - loss: 0.1121 - accuracy: 0.9791 - val_loss: 0.6655 - val_accuracy: 0.7500 Epoch 28/2000 191/191 [==============================] - 0s 73us/step - loss: 0.1069 - accuracy: 0.9791 - val_loss: 0.6765 - val_accuracy: 0.7656 Epoch 29/2000 191/191 [==============================] - 0s 63us/step - loss: 0.1026 - accuracy: 0.9791 - val_loss: 0.6865 - val_accuracy: 0.7656 Epoch 30/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0981 - accuracy: 0.9843 - val_loss: 0.6846 - val_accuracy: 0.7656 Epoch 31/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0922 - accuracy: 0.9843 - val_loss: 0.6932 - val_accuracy: 0.7656 Epoch 32/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0883 - accuracy: 0.9843 - val_loss: 0.7066 - val_accuracy: 0.7656 Epoch 33/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0829 - accuracy: 0.9895 - val_loss: 0.7111 - val_accuracy: 0.7656 Epoch 00033: ReduceLROnPlateau reducing learning rate to 0.0012499999720603228. 
Epoch 34/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0790 - accuracy: 0.9895 - val_loss: 0.7178 - val_accuracy: 0.7656 Epoch 35/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0767 - accuracy: 0.9895 - val_loss: 0.7203 - val_accuracy: 0.7656 Epoch 36/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0752 - accuracy: 0.9895 - val_loss: 0.7267 - val_accuracy: 0.7656 Epoch 37/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0729 - accuracy: 0.9895 - val_loss: 0.7269 - val_accuracy: 0.7656 Epoch 38/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0705 - accuracy: 0.9895 - val_loss: 0.7364 - val_accuracy: 0.7656 Epoch 39/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0686 - accuracy: 0.9895 - val_loss: 0.7434 - val_accuracy: 0.7656 Epoch 40/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0668 - accuracy: 0.9895 - val_loss: 0.7461 - val_accuracy: 0.7812 Epoch 41/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0648 - accuracy: 0.9895 - val_loss: 0.7476 - val_accuracy: 0.7812 Epoch 42/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0631 - accuracy: 0.9895 - val_loss: 0.7577 - val_accuracy: 0.7812 Epoch 43/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0610 - accuracy: 0.9895 - val_loss: 0.7680 - val_accuracy: 0.7812 Epoch 00043: ReduceLROnPlateau reducing learning rate to 0.0006249999860301614. 
Epoch 44/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0592 - accuracy: 0.9895 - val_loss: 0.7672 - val_accuracy: 0.7812 Epoch 45/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0582 - accuracy: 0.9895 - val_loss: 0.7674 - val_accuracy: 0.7812 Epoch 46/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0576 - accuracy: 0.9895 - val_loss: 0.7688 - val_accuracy: 0.7812 Epoch 47/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0566 - accuracy: 0.9895 - val_loss: 0.7700 - val_accuracy: 0.7812 Epoch 48/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0557 - accuracy: 0.9895 - val_loss: 0.7748 - val_accuracy: 0.7812 Epoch 49/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0547 - accuracy: 0.9895 - val_loss: 0.7777 - val_accuracy: 0.7812 Epoch 50/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0540 - accuracy: 0.9948 - val_loss: 0.7821 - val_accuracy: 0.7812 Epoch 51/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0533 - accuracy: 0.9948 - val_loss: 0.7849 - val_accuracy: 0.7812 Epoch 52/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0525 - accuracy: 0.9948 - val_loss: 0.7842 - val_accuracy: 0.7812 Epoch 53/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0515 - accuracy: 0.9948 - val_loss: 0.7869 - val_accuracy: 0.7812 Epoch 00053: ReduceLROnPlateau reducing learning rate to 0.0003124999930150807. 
Epoch 54/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0509 - accuracy: 0.9948 - val_loss: 0.7898 - val_accuracy: 0.7812 Epoch 55/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0504 - accuracy: 0.9948 - val_loss: 0.7899 - val_accuracy: 0.7812 Epoch 56/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0500 - accuracy: 0.9948 - val_loss: 0.7926 - val_accuracy: 0.7812 Epoch 57/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0496 - accuracy: 0.9948 - val_loss: 0.7942 - val_accuracy: 0.7812 Epoch 58/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0492 - accuracy: 0.9948 - val_loss: 0.7951 - val_accuracy: 0.7812 Epoch 59/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0487 - accuracy: 0.9948 - val_loss: 0.7963 - val_accuracy: 0.7812 Epoch 60/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0484 - accuracy: 0.9948 - val_loss: 0.7975 - val_accuracy: 0.7812 Epoch 61/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0480 - accuracy: 0.9948 - val_loss: 0.7996 - val_accuracy: 0.7812 Epoch 62/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0476 - accuracy: 0.9948 - val_loss: 0.8015 - val_accuracy: 0.7812 Epoch 63/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0472 - accuracy: 0.9948 - val_loss: 0.8032 - val_accuracy: 0.7656 Epoch 00063: ReduceLROnPlateau reducing learning rate to 0.00015624999650754035. 
Epoch 64/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0468 - accuracy: 0.9948 - val_loss: 0.8047 - val_accuracy: 0.7656 Epoch 65/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0466 - accuracy: 0.9948 - val_loss: 0.8048 - val_accuracy: 0.7656 Epoch 66/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0464 - accuracy: 0.9948 - val_loss: 0.8054 - val_accuracy: 0.7656 Epoch 67/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0462 - accuracy: 0.9948 - val_loss: 0.8063 - val_accuracy: 0.7656 Epoch 68/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0461 - accuracy: 0.9948 - val_loss: 0.8067 - val_accuracy: 0.7656 Epoch 69/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0459 - accuracy: 0.9948 - val_loss: 0.8076 - val_accuracy: 0.7656 Epoch 70/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0456 - accuracy: 0.9948 - val_loss: 0.8089 - val_accuracy: 0.7656 Epoch 71/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0455 - accuracy: 0.9948 - val_loss: 0.8099 - val_accuracy: 0.7656 Epoch 72/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0452 - accuracy: 0.9948 - val_loss: 0.8107 - val_accuracy: 0.7656 Epoch 73/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0451 - accuracy: 0.9948 - val_loss: 0.8106 - val_accuracy: 0.7656 Epoch 00073: ReduceLROnPlateau reducing learning rate to 7.812499825377017e-05. 
Epoch 74/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0449 - accuracy: 0.9948 - val_loss: 0.8107 - val_accuracy: 0.7656 Epoch 75/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0448 - accuracy: 0.9948 - val_loss: 0.8113 - val_accuracy: 0.7656 Epoch 76/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0447 - accuracy: 0.9948 - val_loss: 0.8118 - val_accuracy: 0.7656 Epoch 77/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0446 - accuracy: 0.9948 - val_loss: 0.8122 - val_accuracy: 0.7656 Epoch 78/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0445 - accuracy: 0.9948 - val_loss: 0.8128 - val_accuracy: 0.7656 Epoch 79/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0444 - accuracy: 0.9948 - val_loss: 0.8131 - val_accuracy: 0.7656 Epoch 80/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0443 - accuracy: 0.9948 - val_loss: 0.8137 - val_accuracy: 0.7656 Epoch 81/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0442 - accuracy: 0.9948 - val_loss: 0.8142 - val_accuracy: 0.7656 Epoch 82/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0441 - accuracy: 0.9948 - val_loss: 0.8146 - val_accuracy: 0.7656 Epoch 83/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0440 - accuracy: 0.9948 - val_loss: 0.8150 - val_accuracy: 0.7656 Epoch 00083: ReduceLROnPlateau reducing learning rate to 3.9062499126885086e-05. 
Epoch 84/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0439 - accuracy: 0.9948 - val_loss: 0.8152 - val_accuracy: 0.7656 Epoch 85/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0439 - accuracy: 0.9948 - val_loss: 0.8155 - val_accuracy: 0.7656 Epoch 86/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0438 - accuracy: 0.9948 - val_loss: 0.8155 - val_accuracy: 0.7656 Epoch 87/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0438 - accuracy: 0.9948 - val_loss: 0.8159 - val_accuracy: 0.7656 Epoch 88/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0437 - accuracy: 0.9948 - val_loss: 0.8161 - val_accuracy: 0.7656 Epoch 89/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0437 - accuracy: 0.9948 - val_loss: 0.8162 - val_accuracy: 0.7656 Epoch 90/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0436 - accuracy: 0.9948 - val_loss: 0.8164 - val_accuracy: 0.7656 Epoch 91/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0436 - accuracy: 0.9948 - val_loss: 0.8166 - val_accuracy: 0.7656 Epoch 92/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0435 - accuracy: 0.9948 - val_loss: 0.8170 - val_accuracy: 0.7656 Epoch 93/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0435 - accuracy: 0.9948 - val_loss: 0.8171 - val_accuracy: 0.7656 Epoch 00093: ReduceLROnPlateau reducing learning rate to 1.9531249563442543e-05. 
Epoch 94/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8172 - val_accuracy: 0.7656 Epoch 95/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8174 - val_accuracy: 0.7656 Epoch 96/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0434 - accuracy: 0.9948 - val_loss: 0.8175 - val_accuracy: 0.7656 Epoch 97/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8174 - val_accuracy: 0.7656 Epoch 98/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8176 - val_accuracy: 0.7656 Epoch 99/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8177 - val_accuracy: 0.7656 Epoch 100/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0433 - accuracy: 0.9948 - val_loss: 0.8179 - val_accuracy: 0.7656 Epoch 101/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8180 - val_accuracy: 0.7656 Epoch 102/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8180 - val_accuracy: 0.7656 Epoch 103/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8182 - val_accuracy: 0.7656 Epoch 00103: ReduceLROnPlateau reducing learning rate to 9.765624781721272e-06. 
Epoch 104/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0432 - accuracy: 0.9948 - val_loss: 0.8182 - val_accuracy: 0.7656 Epoch 105/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8183 - val_accuracy: 0.7656 Epoch 106/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8183 - val_accuracy: 0.7656 Epoch 107/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8184 - val_accuracy: 0.7656 Epoch 108/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8185 - val_accuracy: 0.7656 Epoch 109/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8185 - val_accuracy: 0.7656 Epoch 110/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8186 - val_accuracy: 0.7656 Epoch 111/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8186 - val_accuracy: 0.7656 Epoch 112/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0431 - accuracy: 0.9948 - val_loss: 0.8187 - val_accuracy: 0.7656 Epoch 113/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8187 - val_accuracy: 0.7656 Epoch 00113: ReduceLROnPlateau reducing learning rate to 4.882812390860636e-06. 
Epoch 114/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 115/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 116/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8188 - val_accuracy: 0.7656 Epoch 117/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 118/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 119/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8189 - val_accuracy: 0.7656 Epoch 120/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 121/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 122/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 123/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 00123: ReduceLROnPlateau reducing learning rate to 2.441406195430318e-06. 
Epoch 124/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8190 - val_accuracy: 0.7656 Epoch 125/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 126/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 127/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 128/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0430 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 129/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 130/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 131/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8191 - val_accuracy: 0.7656 Epoch 132/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 133/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 00133: ReduceLROnPlateau reducing learning rate to 1.220703097715159e-06. 
Epoch 134/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 135/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 136/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 137/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 138/2000 191/191 [==============================] - 0s 47us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 139/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 140/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 141/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 142/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 143/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8192 - val_accuracy: 0.7656 Epoch 00143: ReduceLROnPlateau reducing learning rate to 6.103515488575795e-07. 
Epoch 144/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 145/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 146/2000 191/191 [==============================] - 0s 52us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 147/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 148/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 149/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 150/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 151/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 152/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 153/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00153: ReduceLROnPlateau reducing learning rate to 3.0517577442878974e-07. 
Epoch 154/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 155/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 156/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 157/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 158/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 159/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 160/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 161/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 162/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 163/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00163: ReduceLROnPlateau reducing learning rate to 1.5258788721439487e-07. 
Epoch 164/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 165/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 166/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 167/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 168/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 169/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 170/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 171/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 172/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 173/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00173: ReduceLROnPlateau reducing learning rate to 7.629394360719743e-08. 
Epoch 174/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 175/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 176/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 177/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 178/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 179/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 180/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 181/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 182/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 183/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00183: ReduceLROnPlateau reducing learning rate to 3.814697180359872e-08. 
Epoch 184/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 185/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 186/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 187/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 188/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 189/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 190/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 191/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 192/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 193/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00193: ReduceLROnPlateau reducing learning rate to 1.907348590179936e-08. 
Epoch 194/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 195/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 196/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 197/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 198/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 199/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 200/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 201/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 202/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 203/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00203: ReduceLROnPlateau reducing learning rate to 9.53674295089968e-09. 
Epoch 204/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 205/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 206/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 207/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 208/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 209/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 210/2000 191/191 [==============================] - ETA: 0s - loss: 0.0166 - accuracy: 1.00 - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 211/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 212/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 213/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00213: ReduceLROnPlateau reducing learning rate to 4.76837147544984e-09. 
Epoch 214/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 215/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 216/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 217/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 218/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 219/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 220/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 221/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 222/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 223/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00223: ReduceLROnPlateau reducing learning rate to 2.38418573772492e-09. 
Epoch 224/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 225/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 226/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 227/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 228/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 229/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 230/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 231/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 232/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 233/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00233: ReduceLROnPlateau reducing learning rate to 1.19209286886246e-09. 
Epoch 234/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 235/2000 191/191 [==============================] - ETA: 0s - loss: 0.0224 - accuracy: 1.00 - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 236/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 237/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 238/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 239/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 240/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 241/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 242/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 243/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00243: ReduceLROnPlateau reducing learning rate to 5.9604643443123e-10. 
Epoch 244/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 245/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 246/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 247/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 248/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 249/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 250/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 251/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 252/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 253/2000 191/191 [==============================] - ETA: 0s - loss: 0.0256 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00253: ReduceLROnPlateau reducing learning rate to 2.98023217215615e-10. 
Epoch 254/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 255/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 256/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 257/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 258/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 259/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 260/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 261/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 262/2000 191/191 [==============================] - ETA: 0s - loss: 0.0223 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 263/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00263: ReduceLROnPlateau reducing learning rate to 1.490116086078075e-10. 
Epoch 264/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 265/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 266/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 267/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 268/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 269/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 270/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 271/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 272/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 273/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00273: ReduceLROnPlateau reducing learning rate to 7.450580430390374e-11. 
Epoch 274/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 275/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 276/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 277/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 278/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 279/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 280/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 281/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 282/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 283/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00283: ReduceLROnPlateau reducing learning rate to 3.725290215195187e-11. 
Epoch 284/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 285/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 286/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 287/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 288/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 289/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 290/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 291/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 292/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 293/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00293: ReduceLROnPlateau reducing learning rate to 1.8626451075975936e-11. 
Epoch 294/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 295/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 296/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 297/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 298/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 299/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 300/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 301/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 302/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 303/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00303: ReduceLROnPlateau reducing learning rate to 9.313225537987968e-12. 
Epoch 304/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 305/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 306/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 307/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 308/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 309/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 310/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 311/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 312/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 313/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00313: ReduceLROnPlateau reducing learning rate to 4.656612768993984e-12. 
Epoch 314/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 315/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 316/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 317/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 318/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 319/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 320/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 321/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 322/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 323/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00323: ReduceLROnPlateau reducing learning rate to 2.328306384496992e-12. 
Epoch 324/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 325/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 326/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 327/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 328/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 329/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 330/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 331/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 332/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 333/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00333: ReduceLROnPlateau reducing learning rate to 1.164153192248496e-12. 
Epoch 334/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 335/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 336/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 337/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 338/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 339/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 340/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 341/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 342/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 343/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00343: ReduceLROnPlateau reducing learning rate to 5.82076596124248e-13. 
Epoch 344/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 345/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 346/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 347/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 348/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 349/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 350/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 351/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 352/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 353/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00353: ReduceLROnPlateau reducing learning rate to 2.91038298062124e-13. 
Epoch 354/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 355/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 356/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 357/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 358/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 359/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 360/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 361/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 362/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 363/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00363: ReduceLROnPlateau reducing learning rate to 1.45519149031062e-13. 
Epoch 364/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 365/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 366/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 367/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 368/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 369/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 370/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 371/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 372/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 373/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00373: ReduceLROnPlateau reducing learning rate to 7.2759574515531e-14. 
Epoch 374/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 375/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 376/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 377/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 378/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 379/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 380/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 381/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 382/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 383/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00383: ReduceLROnPlateau reducing learning rate to 3.63797872577655e-14. 
Epoch 384/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 385/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 386/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 387/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 388/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 389/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 390/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 391/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 392/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 393/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00393: ReduceLROnPlateau reducing learning rate to 1.818989362888275e-14. 
Epoch 394/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 395/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 396/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 397/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 398/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 399/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 400/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 401/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 402/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 403/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00403: ReduceLROnPlateau reducing learning rate to 9.094946814441375e-15. 
Epoch 404/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 405/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 406/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 407/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 408/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 409/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 410/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 411/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 412/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 413/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00413: ReduceLROnPlateau reducing learning rate to 4.5474734072206875e-15. 
Epoch 414/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 415/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 416/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 417/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 418/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 419/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 420/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 421/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 422/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 423/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00423: ReduceLROnPlateau reducing learning rate to 2.2737367036103438e-15. 
Epoch 424/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 425/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 426/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 427/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 428/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 429/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 430/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 431/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 432/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 433/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00433: ReduceLROnPlateau reducing learning rate to 1.1368683518051719e-15. 
Epoch 434/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 435/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 436/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 437/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 438/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 439/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 440/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 441/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 442/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 443/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00443: ReduceLROnPlateau reducing learning rate to 5.684341759025859e-16. 
Epoch 444/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 445/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 446/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 447/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 448/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 449/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 450/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 451/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 452/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 453/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00453: ReduceLROnPlateau reducing learning rate to 2.8421708795129297e-16. 
Epoch 454/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 455/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 456/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 457/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 458/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 459/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 460/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 461/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 462/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 463/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00463: ReduceLROnPlateau reducing learning rate to 1.4210854397564648e-16. 
Epoch 464/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 465/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 466/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 467/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 468/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 469/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 470/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 471/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 472/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 473/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00473: ReduceLROnPlateau reducing learning rate to 7.105427198782324e-17. 
Epoch 474/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 475/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 476/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 477/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 478/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 479/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 480/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 481/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 482/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 483/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00483: ReduceLROnPlateau reducing learning rate to 3.552713599391162e-17. 
Epoch 484/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 485/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 486/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 487/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 488/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 489/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 490/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 491/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 492/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 493/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00493: ReduceLROnPlateau reducing learning rate to 1.776356799695581e-17. 
Epoch 494/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 495/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 496/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 497/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 498/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 499/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 500/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 501/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 502/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 503/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00503: ReduceLROnPlateau reducing learning rate to 8.881783998477905e-18. 
Epoch 504/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 505/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 506/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 507/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 508/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 509/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 510/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 511/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 512/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 513/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00513: ReduceLROnPlateau reducing learning rate to 4.440891999238953e-18. 
Epoch 514/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 515/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 516/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 517/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 518/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 519/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 520/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 521/2000 191/191 [==============================] - 0s 141us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 522/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 523/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00523: ReduceLROnPlateau reducing learning rate to 2.2204459996194763e-18. 
Epoch 524/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 525/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 526/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 527/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 528/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 529/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 530/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 531/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 532/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 533/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00533: ReduceLROnPlateau reducing learning rate to 1.1102229998097382e-18. 
Epoch 534/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 535/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 536/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 537/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 538/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 539/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 540/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 541/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 542/2000 191/191 [==============================] - ETA: 0s - loss: 0.0396 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 543/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00543: ReduceLROnPlateau reducing learning rate to 5.551114999048691e-19. 
Epoch 544/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 545/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 546/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 547/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 548/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 549/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 550/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 551/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 552/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 553/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00553: ReduceLROnPlateau reducing learning rate to 2.7755574995243454e-19. 
Epoch 554/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 555/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 556/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 557/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 558/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 559/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 560/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 561/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 562/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 563/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00563: ReduceLROnPlateau reducing learning rate to 1.3877787497621727e-19. 
Epoch 564/2000 191/191 [==============================] - 0s 303us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 565/2000 191/191 [==============================] - 0s 288us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 566/2000 191/191 [==============================] - 0s 194us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 567/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 568/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 569/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 570/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 571/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 572/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 573/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00573: ReduceLROnPlateau reducing learning rate to 6.938893748810864e-20. 
Epoch 574/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 575/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 576/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 577/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 578/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 579/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 580/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 581/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 582/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 583/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00583: ReduceLROnPlateau reducing learning rate to 3.469446874405432e-20. 
Epoch 584/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 585/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 586/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 587/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 588/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 589/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 590/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 591/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 592/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 593/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00593: ReduceLROnPlateau reducing learning rate to 1.734723437202716e-20. 
Epoch 594/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 595/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 596/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 597/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 598/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 599/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 600/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 601/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 602/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 603/2000 191/191 [==============================] - 0s 173us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00603: ReduceLROnPlateau reducing learning rate to 8.67361718601358e-21. 
Epoch 604/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 605/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 606/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 607/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 608/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 609/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 610/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 611/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 612/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 613/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00613: ReduceLROnPlateau reducing learning rate to 4.33680859300679e-21. 
Epoch 614/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 615/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 616/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 617/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 618/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 619/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 620/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 621/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 622/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 623/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00623: ReduceLROnPlateau reducing learning rate to 2.168404296503395e-21. 
Epoch 624/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 625/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 626/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 627/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 628/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 629/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 630/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 631/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 632/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 633/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00633: ReduceLROnPlateau reducing learning rate to 1.0842021482516974e-21. 
Epoch 634/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 635/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 636/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 637/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 638/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 639/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 640/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 641/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 642/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 643/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00643: ReduceLROnPlateau reducing learning rate to 5.421010741258487e-22. 
Epoch 644/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 645/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 646/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 647/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 648/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 649/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 650/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 651/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 652/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 653/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00653: ReduceLROnPlateau reducing learning rate to 2.7105053706292436e-22. 
Epoch 654/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 655/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 656/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 657/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 658/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 659/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 660/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 661/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 662/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 663/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00663: ReduceLROnPlateau reducing learning rate to 1.3552526853146218e-22. 
Epoch 664/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 665/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 666/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 667/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 668/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 669/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 670/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 671/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 672/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 673/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00673: ReduceLROnPlateau reducing learning rate to 6.776263426573109e-23. 
Epoch 674/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 675/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 676/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 677/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 678/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 679/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 680/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 681/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 682/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 683/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00683: ReduceLROnPlateau reducing learning rate to 3.3881317132865545e-23. 
Epoch 684/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 685/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 686/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 687/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 688/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 689/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 690/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 691/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 692/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 693/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00693: ReduceLROnPlateau reducing learning rate to 1.6940658566432772e-23. 
Epoch 694/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 695/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 696/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 697/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 698/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 699/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 700/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 701/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 702/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 703/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00703: ReduceLROnPlateau reducing learning rate to 8.470329283216386e-24. 
Epoch 704/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 705/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 706/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 707/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 708/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 709/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 710/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 711/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 712/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 713/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00713: ReduceLROnPlateau reducing learning rate to 4.235164641608193e-24. 
Epoch 714/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 715/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 716/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 717/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 718/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 719/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 720/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 721/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 722/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 723/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00723: ReduceLROnPlateau reducing learning rate to 2.1175823208040965e-24. 
Epoch 724/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 725/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 726/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 727/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 728/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 729/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 730/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 731/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 732/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 733/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00733: ReduceLROnPlateau reducing learning rate to 1.0587911604020483e-24. 
Epoch 734/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 735/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 736/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 737/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 738/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 739/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 740/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 741/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 742/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 743/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00743: ReduceLROnPlateau reducing learning rate to 5.293955802010241e-25. 
Epoch 744/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 745/2000 191/191 [==============================] - ETA: 0s - loss: 0.0317 - accuracy: 1.00 - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 746/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 747/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 748/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 749/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 750/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 751/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 752/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 753/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00753: ReduceLROnPlateau reducing learning rate to 2.6469779010051207e-25. 
Epoch 754/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 755/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 756/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 757/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 758/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 759/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 760/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 761/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 762/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 763/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00763: ReduceLROnPlateau reducing learning rate to 1.3234889505025603e-25. 
Epoch 764/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 765/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 766/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 767/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 768/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 769/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 770/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 771/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 772/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 773/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00773: ReduceLROnPlateau reducing learning rate to 6.617444752512802e-26. 
Epoch 774/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 775/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 776/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 777/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 778/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 779/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 780/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 781/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 782/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 783/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00783: ReduceLROnPlateau reducing learning rate to 3.308722376256401e-26. 
Epoch 784/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 785/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 786/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 787/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 788/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 789/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 790/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 791/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 792/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 793/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00793: ReduceLROnPlateau reducing learning rate to 1.6543611881282004e-26. 
Epoch 794/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 795/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 796/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 797/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 798/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 799/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 800/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 801/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 802/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 803/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00803: ReduceLROnPlateau reducing learning rate to 8.271805940641002e-27. 
Epoch 804/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 805/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 806/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 807/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 808/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 809/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 810/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 811/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 812/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 813/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00813: ReduceLROnPlateau reducing learning rate to 4.135902970320501e-27. 
Epoch 814/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 815/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 816/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 817/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 818/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 819/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 820/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 821/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 822/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 823/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00823: ReduceLROnPlateau reducing learning rate to 2.0679514851602505e-27. 
Epoch 824/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 825/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 826/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 827/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 828/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 829/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 830/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 831/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 832/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 833/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00833: ReduceLROnPlateau reducing learning rate to 1.0339757425801253e-27. 
Epoch 834/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 835/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 836/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 837/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 838/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 839/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 840/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 841/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 842/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 843/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00843: ReduceLROnPlateau reducing learning rate to 5.169878712900626e-28. 
Epoch 844/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 845/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 846/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 847/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 848/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 849/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 850/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 851/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 852/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 853/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00853: ReduceLROnPlateau reducing learning rate to 2.584939356450313e-28. 
Epoch 854/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 855/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 856/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 857/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 858/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 859/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 860/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 861/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 862/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 863/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00863: ReduceLROnPlateau reducing learning rate to 1.2924696782251566e-28. 
Epoch 864/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 865/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 866/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 867/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 868/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 869/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 870/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 871/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 872/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 873/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00873: ReduceLROnPlateau reducing learning rate to 6.462348391125783e-29. 
Epoch 874/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 875/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 876/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 877/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 878/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 879/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 880/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 881/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 882/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 883/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00883: ReduceLROnPlateau reducing learning rate to 3.2311741955628914e-29. 
Epoch 884/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 885/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 886/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 887/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 888/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 889/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 890/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 891/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 892/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 893/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00893: ReduceLROnPlateau reducing learning rate to 1.6155870977814457e-29. 
Epoch 894/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 895/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 896/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 897/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 898/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 899/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 900/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 901/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 902/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 903/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00903: ReduceLROnPlateau reducing learning rate to 8.077935488907229e-30. 
Epoch 904/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 905/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 906/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 907/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 908/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 909/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 910/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 911/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 912/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 913/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00913: ReduceLROnPlateau reducing learning rate to 4.038967744453614e-30. 
Epoch 914/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 915/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 916/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 917/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 918/2000 191/191 [==============================] - ETA: 0s - loss: 0.0296 - accuracy: 1.00 - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 919/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 920/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 921/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 922/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 923/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00923: ReduceLROnPlateau reducing learning rate to 2.019483872226807e-30. 
Epoch 924/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 925/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 926/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 927/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 928/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 929/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 930/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 931/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 932/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 933/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00933: ReduceLROnPlateau reducing learning rate to 1.0097419361134036e-30. 
Epoch 934/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 935/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 936/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 937/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 938/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 939/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 940/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 941/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 942/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 943/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00943: ReduceLROnPlateau reducing learning rate to 5.048709680567018e-31. 
Epoch 944/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 945/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 946/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 947/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 948/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 949/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 950/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 951/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 952/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 953/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00953: ReduceLROnPlateau reducing learning rate to 2.524354840283509e-31. 
Epoch 954/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 955/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 956/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 957/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 958/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 959/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 960/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 961/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 962/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 963/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00963: ReduceLROnPlateau reducing learning rate to 1.2621774201417545e-31. 
Epoch 964/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 965/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 966/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 967/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 968/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 969/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 970/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 971/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 972/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 973/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00973: ReduceLROnPlateau reducing learning rate to 6.310887100708772e-32. 
Epoch 974/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 975/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 976/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 977/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 978/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 979/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 980/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 981/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 982/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 983/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00983: ReduceLROnPlateau reducing learning rate to 3.155443550354386e-32. 
Epoch 984/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 985/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 986/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 987/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 988/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 989/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 990/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 991/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 992/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 993/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 00993: ReduceLROnPlateau reducing learning rate to 1.577721775177193e-32. 
Epoch 994/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 995/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 996/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 997/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 998/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 999/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1000/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1001/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1002/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1003/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01003: ReduceLROnPlateau reducing learning rate to 7.888608875885965e-33. 
Epoch 1004/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1005/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1006/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1007/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1008/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1009/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1010/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1011/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1012/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1013/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01013: ReduceLROnPlateau reducing learning rate to 3.944304437942983e-33. 
Epoch 1014/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1015/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1016/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1017/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1018/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1019/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1020/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1021/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1022/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1023/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01023: ReduceLROnPlateau reducing learning rate to 1.9721522189714914e-33. 
Epoch 1024/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1025/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1026/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1027/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1028/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1029/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1030/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1031/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1032/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1033/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01033: ReduceLROnPlateau reducing learning rate to 9.860761094857457e-34. 
Epoch 1034/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1035/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1036/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1037/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1038/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1039/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1040/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1041/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1042/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1043/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01043: ReduceLROnPlateau reducing learning rate to 4.930380547428728e-34. 
Epoch 1044/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1045/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1046/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1047/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1048/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1049/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1050/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1051/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1052/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1053/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01053: ReduceLROnPlateau reducing learning rate to 2.465190273714364e-34. 
Epoch 1054/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1055/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1056/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1057/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1058/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1059/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1060/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1061/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1062/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1063/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01063: ReduceLROnPlateau reducing learning rate to 1.232595136857182e-34. 
Epoch 1064/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1065/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1066/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1067/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1068/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1069/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1070/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1071/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1072/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1073/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01073: ReduceLROnPlateau reducing learning rate to 6.16297568428591e-35. 
Epoch 1074/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1075/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1076/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1077/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1078/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1079/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1080/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1081/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1082/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1083/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01083: ReduceLROnPlateau reducing learning rate to 3.081487842142955e-35. 
Epoch 1084/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1085/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1086/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1087/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1088/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1089/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1090/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1091/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1092/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1093/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01093: ReduceLROnPlateau reducing learning rate to 1.5407439210714776e-35. 
Epoch 1094/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1095/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1096/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1097/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1098/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1099/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1100/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1101/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1102/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1103/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01103: ReduceLROnPlateau reducing learning rate to 7.703719605357388e-36. 
Epoch 1104/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1105/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1106/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1107/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1108/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1109/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1110/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1111/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1112/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1113/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01113: ReduceLROnPlateau reducing learning rate to 3.851859802678694e-36. 
Epoch 1114/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1115/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1116/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1117/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1118/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1119/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1120/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1121/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1122/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1123/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01123: ReduceLROnPlateau reducing learning rate to 1.925929901339347e-36. 
Epoch 1124/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1125/2000 191/191 [==============================] - ETA: 0s - loss: 0.0433 - accuracy: 1.00 - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1126/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1127/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1128/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1129/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1130/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1131/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1132/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1133/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01133: ReduceLROnPlateau reducing learning rate to 9.629649506696735e-37. 
Epoch 1134/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1135/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1136/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1137/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1138/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1139/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1140/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1141/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1142/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1143/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01143: ReduceLROnPlateau reducing learning rate to 4.8148247533483676e-37. 
Epoch 1144/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1145/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1146/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1147/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1148/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1149/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1150/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1151/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1152/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1153/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01153: ReduceLROnPlateau reducing learning rate to 2.4074123766741838e-37. 
Epoch 1154/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1155/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1156/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1157/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1158/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1159/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1160/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1161/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1162/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1163/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01163: ReduceLROnPlateau reducing learning rate to 1.2037061883370919e-37. 
Epoch 1164/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1165/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1166/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1167/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1168/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1169/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1170/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1171/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1172/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1173/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01173: ReduceLROnPlateau reducing learning rate to 6.018530941685459e-38. 
Epoch 1174/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1175/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1176/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1177/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1178/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1179/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1180/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1181/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1182/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1183/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01183: ReduceLROnPlateau reducing learning rate to 3.0092654708427297e-38. 
Epoch 1184/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1185/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1186/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1187/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1188/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1189/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1190/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1191/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1192/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1193/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01193: ReduceLROnPlateau reducing learning rate to 1.5046327354213649e-38. 
Epoch 1194/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1195/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1196/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1197/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1198/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1199/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1200/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1201/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1202/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1203/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01203: ReduceLROnPlateau reducing learning rate to 7.523163677106824e-39. 
Epoch 1204/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1205/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1206/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1207/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1208/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1209/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1210/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1211/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1212/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1213/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01213: ReduceLROnPlateau reducing learning rate to 3.761581838553412e-39. 
Epoch 1214/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1215/2000 191/191 [==============================] - ETA: 0s - loss: 0.0311 - accuracy: 1.00 - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1216/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1217/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1218/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1219/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1220/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1221/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1222/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1223/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01223: ReduceLROnPlateau reducing learning rate to 1.88079056895209e-39. 
Epoch 1224/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1225/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1226/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1227/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1228/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1229/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1230/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1231/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1232/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1233/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01233: ReduceLROnPlateau reducing learning rate to 9.40395284476045e-40. 
Epoch 1234/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1235/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1236/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1237/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1238/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1239/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1240/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1241/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1242/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1243/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01243: ReduceLROnPlateau reducing learning rate to 4.701972919134064e-40. 
Epoch 1244/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1245/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1246/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1247/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1248/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1249/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1250/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1251/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1252/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1253/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01253: ReduceLROnPlateau reducing learning rate to 2.350986459567032e-40. 
Epoch 1254/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1255/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1256/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1257/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1258/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1259/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1260/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1261/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1262/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1263/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01263: ReduceLROnPlateau reducing learning rate to 1.175493229783516e-40. 
Epoch 1264/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1265/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1266/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1267/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1268/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1269/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1270/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1271/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1272/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1273/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01273: ReduceLROnPlateau reducing learning rate to 5.87746614891758e-41. 
Epoch 1274/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1275/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1276/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1277/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1278/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1279/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1280/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1281/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1282/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1283/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01283: ReduceLROnPlateau reducing learning rate to 2.93873307445879e-41. 
Epoch 1284/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1285/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1286/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1287/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1288/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1289/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1290/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1291/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1292/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1293/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01293: ReduceLROnPlateau reducing learning rate to 1.4694015696910032e-41. 
Epoch 1294/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1295/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1296/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1297/2000 191/191 [==============================] - ETA: 0s - loss: 0.0313 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1298/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1299/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1300/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1301/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1302/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1303/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01303: ReduceLROnPlateau reducing learning rate to 7.347007848455016e-42. 
Epoch 1304/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1305/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1306/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1307/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1308/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1309/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1310/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1311/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1312/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1313/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01313: ReduceLROnPlateau reducing learning rate to 3.673503924227508e-42. 
Epoch 1314/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1315/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1316/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1317/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1318/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1319/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1320/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1321/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1322/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1323/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01323: ReduceLROnPlateau reducing learning rate to 1.8371022867298352e-42. 
Epoch 1324/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1325/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1326/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1327/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1328/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1329/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1330/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1331/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1332/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1333/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01333: ReduceLROnPlateau reducing learning rate to 9.185511433649176e-43. 
Epoch 1334/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1335/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1336/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1337/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1338/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1339/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1340/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1341/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1342/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1343/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01343: ReduceLROnPlateau reducing learning rate to 4.5962589629854e-43. 
Epoch 1344/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1345/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1346/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1347/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1348/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1349/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1350/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1351/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1352/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1353/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01353: ReduceLROnPlateau reducing learning rate to 2.2981294814927e-43. 
Epoch 1354/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1355/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1356/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1357/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1358/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1359/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1360/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1361/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1362/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1363/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01363: ReduceLROnPlateau reducing learning rate to 1.14906474074635e-43. 
Epoch 1364/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1365/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1366/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1367/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1368/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1369/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1370/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1371/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1372/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1373/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01373: ReduceLROnPlateau reducing learning rate to 5.74532370373175e-44. 
Epoch 1374/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1375/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1376/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1377/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1378/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1379/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1380/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1381/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1382/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1383/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01383: ReduceLROnPlateau reducing learning rate to 2.872661851865875e-44. 
Epoch 1384/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1385/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1386/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1387/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1388/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1389/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1390/2000 191/191 [==============================] - ETA: 0s - loss: 0.0479 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1391/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1392/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1393/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01393: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-44. 
Epoch 1394/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1395/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1396/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1397/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1398/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1399/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1400/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1401/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1402/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1403/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01403: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-45. 
Epoch 1404/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1405/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1406/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1407/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1408/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1409/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1410/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1411/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1412/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1413/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01413: ReduceLROnPlateau reducing learning rate to 3.5032461608120427e-45. 
Epoch 1414/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1415/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1416/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1417/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1418/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1419/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1420/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1421/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1422/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1423/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01423: ReduceLROnPlateau reducing learning rate to 1.401298464324817e-45. 
Epoch 1424/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1425/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1426/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1427/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1428/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1429/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1430/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1431/2000 191/191 [==============================] - ETA: 0s - loss: 0.0420 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1432/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1433/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 01433: ReduceLROnPlateau reducing learning rate to 7.006492321624085e-46. 
Epoch 1434/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1435/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1436/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1437/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1438/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1439/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1440/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1441/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1442/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1443/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1444/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1445/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1446/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1447/2000 191/191 [==============================] - 0s 
115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1448/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1449/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1450/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1451/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1452/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1453/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1454/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1455/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1456/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1457/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1458/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1459/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1460/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 
- val_accuracy: 0.7656 Epoch 1461/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1462/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1463/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1464/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1465/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1466/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1467/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1468/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1469/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1470/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1471/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1472/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1473/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1474/2000 191/191 
[==============================] - ETA: 0s - loss: 0.0491 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1475/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1476/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1477/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1478/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1479/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1480/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1481/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1482/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1483/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1484/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1485/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1486/2000 191/191 [==============================] - 0s 147us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1487/2000 191/191 
[==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1488/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1489/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1490/2000 191/191 [==============================] - 0s 114us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1491/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1492/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1493/2000 191/191 [==============================] - 0s 109us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1494/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1495/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1496/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1497/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1498/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1499/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1500/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1501/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1502/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1503/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1504/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1505/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1506/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1507/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1508/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1509/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1510/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1511/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1512/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1513/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1514/2000 
191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1515/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1516/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1517/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1518/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1519/2000 191/191 [==============================] - 0s 95us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1520/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1521/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1522/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1523/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1524/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1525/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1526/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1527/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 
- accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1528/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1529/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1530/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1531/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1532/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1533/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1534/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1535/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1536/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1537/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1538/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1539/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1540/2000 191/191 [==============================] - ETA: 0s - loss: 0.0384 - accuracy: 1.00 - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - 
val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1541/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1542/2000 191/191 [==============================] - 0s 188us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1543/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1544/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1545/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1546/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1547/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1548/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1549/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1550/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1551/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1552/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1553/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1554/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1555/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1556/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1557/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1558/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1559/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1560/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1561/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1562/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1563/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1564/2000 191/191 [==============================] - ETA: 0s - loss: 0.0228 - accuracy: 1.00 - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1565/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1566/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1567/2000 191/191 [==============================] 
- 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1568/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1569/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1570/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1571/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1572/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1573/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1574/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1575/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1576/2000 191/191 [==============================] - 0s 111us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1577/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1578/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1579/2000 191/191 [==============================] - 0s 102us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1580/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 
- val_accuracy: 0.7656 Epoch 1581/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1582/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1583/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1584/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1585/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1586/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1587/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1588/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1589/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1590/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1591/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1592/2000 191/191 [==============================] - 0s 101us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1593/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1594/2000 191/191 
[==============================] - 0s 112us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1595/2000 191/191 [==============================] - 0s 106us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1596/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1597/2000 191/191 [==============================] - ETA: 0s - loss: 0.0252 - accuracy: 1.00 - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1598/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1599/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1600/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1601/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1602/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1603/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1604/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1605/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1606/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1607/2000 191/191 
[==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1608/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1609/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1610/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1611/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1612/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1613/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1614/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1615/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1616/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1617/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1618/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1619/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1620/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1621/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1622/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1623/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1624/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1625/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1626/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1627/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1628/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1629/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1630/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1631/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1632/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1633/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 
1634/2000 191/191 [==============================] - 0s 126us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1635/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1636/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1637/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1638/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1639/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1640/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1641/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1642/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1643/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1644/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1645/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1646/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1647/2000 191/191 [==============================] - 0s 99us/step - 
loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1648/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1649/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1650/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1651/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1652/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1653/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1654/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1655/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1656/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1657/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1658/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1659/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1660/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 
0.7656 Epoch 1661/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1662/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1663/2000 191/191 [==============================] - 0s 152us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1664/2000 191/191 [==============================] - 0s 136us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1665/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1666/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1667/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1668/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1669/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1670/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1671/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1672/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1673/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1674/2000 191/191 [==============================] - 0s 
94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1675/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1676/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1677/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1678/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1679/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1680/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1681/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1682/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1683/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1684/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1685/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1686/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1687/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - 
val_accuracy: 0.7656 Epoch 1688/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1689/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1690/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1691/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1692/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1693/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1694/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1695/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1696/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1697/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1698/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1699/2000 191/191 [==============================] - 0s 131us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1700/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1701/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1702/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1703/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1704/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1705/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1706/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1707/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1708/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1709/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1710/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1711/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1712/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1713/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1714/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1715/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1716/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1717/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1718/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1719/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1720/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1721/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1722/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1723/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1724/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1725/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1726/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1727/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1728/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1729/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1730/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1731/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1732/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1733/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1734/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1735/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1736/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1737/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1738/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1739/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1740/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1741/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1742/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1743/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1744/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1745/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1746/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1747/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1748/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1749/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1750/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1751/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1752/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1753/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1754/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1755/2000 191/191 
[==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1756/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1757/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1758/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1759/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1760/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1761/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1762/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1763/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1764/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1765/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1766/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1767/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1768/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1769/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1770/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1771/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1772/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1773/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1774/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1775/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1776/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1777/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1778/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1779/2000 191/191 [==============================] - 0s 105us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1780/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1781/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1782/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1783/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1784/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1785/2000 191/191 [==============================] - 0s 120us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1786/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1787/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1788/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1789/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1790/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1791/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1792/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1793/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1794/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1795/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1796/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1797/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1798/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1799/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1800/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1801/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1802/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1803/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1804/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1805/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1806/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1807/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1808/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1809/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1810/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1811/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1812/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1813/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1814/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1815/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1816/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1817/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1818/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1819/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1820/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1821/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1822/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1823/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1824/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1825/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1826/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1827/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1828/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1829/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1830/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1831/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1832/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1833/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1834/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1835/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1836/2000 191/191 
[==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1837/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1838/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1839/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1840/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1841/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1842/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1843/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1844/2000 191/191 [==============================] - 0s 115us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1845/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1846/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1847/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1848/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1849/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1850/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1851/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1852/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1853/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1854/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1855/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1856/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1857/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1858/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1859/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1860/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1861/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1862/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1863/2000 191/191 
[==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1864/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1865/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1866/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1867/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1868/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1869/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1870/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1871/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1872/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1873/2000 191/191 [==============================] - ETA: 0s - loss: 0.0489 - accuracy: 1.00 - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1874/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1875/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1876/2000 191/191 [==============================] 
- 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1877/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1878/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1879/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1880/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1881/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1882/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1883/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1884/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1885/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1886/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1887/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1888/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1889/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - 
val_accuracy: 0.7656 Epoch 1890/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1891/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1892/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1893/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1894/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1895/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1896/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1897/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1898/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1899/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1900/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1901/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1902/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1903/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1904/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1905/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1906/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1907/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1908/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1909/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1910/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1911/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1912/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1913/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1914/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1915/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1916/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1917/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1918/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1919/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1920/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1921/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1922/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1923/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1924/2000 191/191 [==============================] - 0s 79us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1925/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1926/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1927/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1928/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1929/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1930/2000 191/191 
[==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1931/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1932/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1933/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1934/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1935/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1936/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1937/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1938/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1939/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1940/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1941/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1942/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1943/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1944/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1945/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1946/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1947/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1948/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1949/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1950/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1951/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1952/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1953/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1954/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1955/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1956/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1957/2000 191/191 
[==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1958/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1959/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1960/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1961/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1962/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1963/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1964/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1965/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1966/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1967/2000 191/191 [==============================] - 0s 58us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1968/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1969/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1970/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 
0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1971/2000 191/191 [==============================] - ETA: 0s - loss: 0.0400 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1972/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1973/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1974/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1975/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1976/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1977/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1978/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1979/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1980/2000 191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1981/2000 191/191 [==============================] - 0s 110us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1982/2000 191/191 [==============================] - 0s 99us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1983/2000 191/191 [==============================] - ETA: 0s - loss: 0.0319 - accuracy: 1.00 - 0s 68us/step - loss: 0.0429 - 
accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1984/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1985/2000 191/191 [==============================] - 0s 73us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1986/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1987/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1988/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1989/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1990/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1991/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1992/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1993/2000 191/191 [==============================] - 0s 68us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1994/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1995/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1996/2000 191/191 [==============================] - 0s 94us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1997/2000 
191/191 [==============================] - 0s 63us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1998/2000 191/191 [==============================] - 0s 78us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 1999/2000 191/191 [==============================] - 0s 89us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656 Epoch 2000/2000 191/191 [==============================] - 0s 84us/step - loss: 0.0429 - accuracy: 0.9948 - val_loss: 0.8193 - val_accuracy: 0.7656
# Plot the learning curves recorded by Keras in `history` during model.fit.
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))  # one point per training epoch

# Accuracy curves: blue dots = training, solid line = validation.
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Loss curves on a separate figure (plt.show() above closed the first one).
plt.plot(epochs, loss, 'bo', label='Training loss')
plt.plot(epochs, val_loss, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
range(0, 2000)
# Final held-out evaluation of the trained network on the test split.
test_loss, test_acc = model.evaluate(X_test, y_test)
print(f"test loss: {test_loss}, test accuracy: {test_acc}")
64/64 [==============================] - 0s 47us/step test loss: 0.8193266093730927, test accuracy: 0.765625
# Raw predicted probabilities for the test set; AUC works directly on scores.
y_pred = model.predict(X_test)
auc = roc_auc_score(y_test, y_pred)
print("AUC ROC: ", auc)
AUC ROC: 0.7282608695652174
# Binarise the probabilities at the conventional 0.5 cut-off, then score
# chance-corrected agreement with Cohen's kappa.
y_pred = [int(p >= 0.5) for p in y_pred]
print("Kappa: ", cohen_kappa_score(y_test, y_pred))
Kappa: 0.366754617414248
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.339415 | 0.847773 | 0.497198 | -0.389310 | 1.225458 | 1.947033 | -0.736267 | 0.492219 | 0.576682 | 1.504697 | -1.796460 | 0.724954 | 0.958600 |
| 1 | 0.587658 | -1.195426 | 0.636375 | 0.199876 | 0.765321 | 0.061181 | 0.379367 | -0.440867 | 0.232893 | 1.339920 | 0.110001 | 0.807525 | 0.815678 |
| 2 | 1.465595 | -2.307943 | 0.354567 | -0.058273 | -1.298853 | -0.811453 | -1.551580 | -3.934320 | -1.079432 | 2.546130 | 1.421407 | 0.639359 | 0.199094 |
| 3 | 0.749403 | -1.690498 | -0.125200 | -1.016135 | 0.825845 | 0.271444 | -0.104786 | -0.992141 | 0.049182 | 1.425948 | -0.343269 | -0.789558 | -0.411898 |
| 4 | -0.280577 | 0.393332 | 0.744917 | 2.411400 | -0.777421 | -0.420018 | 1.258355 | -1.544565 | -0.498071 | 0.421527 | -0.632908 | -0.056846 | -0.072348 |
| 5 | -0.158690 | 0.404891 | -0.147920 | -0.299241 | -0.786974 | 0.697216 | 0.290501 | 0.019739 | -1.468086 | -0.346174 | -0.086965 | 0.026492 | 1.019512 |
| 6 | 1.646777 | 0.772744 | -1.425228 | -0.562610 | -1.556076 | 0.533289 | -0.404271 | 1.676958 | 0.979516 | 0.415548 | 0.544719 | 0.433332 | 0.204271 |
| 7 | 1.124970 | 0.506236 | 0.738993 | 1.984485 | -0.928706 | -0.494097 | -0.707105 | -0.494778 | -1.642929 | 0.207467 | 0.181382 | 2.431721 | 0.848697 |
| 8 | 0.920059 | 1.438862 | -2.048354 | 1.503567 | -2.801303 | 0.567132 | -0.745441 | 0.569519 | 0.130917 | 1.965436 | -0.034797 | 1.164878 | 0.074074 |
| 9 | 0.182544 | 0.310622 | 0.067722 | 0.870138 | 0.168366 | 0.682045 | -0.191296 | -0.144962 | -0.630020 | -0.284032 | -0.315301 | 0.344841 | 0.495167 |
| 10 | 0.168663 | 0.389450 | 0.034360 | 1.213392 | 0.248437 | 0.870618 | -0.460824 | -0.174734 | -0.710502 | -0.228408 | -0.265153 | 0.349416 | 0.584114 |
| 11 | 0.153010 | -0.118336 | 0.639531 | 1.504522 | 0.937909 | 0.356048 | -0.089987 | -0.628522 | 0.064203 | 0.966049 | 0.403915 | -0.943626 | 0.173874 |
| 12 | 0.132578 | 0.261966 | -2.871493 | -3.398160 | -0.256458 | 1.596532 | -0.358711 | 0.175955 | -0.499075 | 0.949085 | 2.235525 | -0.197712 | -0.272366 |
| 13 | 1.094629 | 0.885150 | -1.130672 | -0.083270 | 0.672482 | 0.750453 | -0.863949 | 0.140540 | 0.423312 | -0.305155 | -0.424905 | 0.318660 | 0.885900 |
| 14 | 0.771472 | 0.364448 | -0.454696 | 0.434253 | 0.912699 | 0.745924 | -0.073390 | -0.406473 | 0.450765 | 0.323180 | -0.458826 | -0.132295 | 0.495454 |
| 15 | 0.677561 | 0.166795 | 0.746471 | 0.075191 | 0.867924 | -1.621678 | 0.771146 | -0.067286 | 0.557998 | -0.093593 | 0.020233 | -0.800013 | -0.629188 |
| 16 | -0.032353 | 1.227345 | -0.188580 | 0.927210 | 0.016663 | 1.001867 | -0.473811 | 0.782387 | 1.542760 | -0.345478 | -0.838104 | -0.439443 | 1.179204 |
| 17 | 0.459031 | 1.258961 | -0.329412 | 1.391790 | -0.208888 | 1.059241 | -1.245671 | 0.619153 | 0.245780 | 0.644548 | -0.602629 | -0.928581 | 0.739885 |
| 18 | -0.359172 | 0.051214 | -0.603962 | 0.778896 | 1.630471 | 1.802477 | 1.486205 | -0.140738 | -0.894366 | 0.736624 | 2.114721 | 1.078175 | -0.965785 |
| 19 | 0.209859 | -0.615399 | -0.676895 | 0.735655 | 0.805509 | -0.696793 | 1.073068 | 0.240429 | -0.205934 | -0.759693 | 0.672843 | 0.569482 | -0.455391 |
| 20 | 0.127381 | -0.265099 | -0.258801 | -0.127568 | 0.649447 | 0.244473 | 1.897421 | -0.344616 | -0.593159 | 0.065147 | 1.787607 | 1.219355 | -0.171813 |
| 21 | 1.222717 | 0.409860 | 1.311826 | 0.703873 | 0.322062 | 0.305461 | -0.522644 | -0.750833 | 0.001767 | 0.017953 | 0.254329 | -0.227762 | -0.614790 |
| 22 | 1.173352 | 0.490500 | 0.742825 | -0.028159 | -0.272396 | -0.502733 | -0.759443 | -1.031924 | -0.157975 | 0.075659 | 0.604220 | 0.143298 | -0.001849 |
| 23 | 1.069960 | 0.858822 | -0.795544 | 0.076688 | 0.851875 | 0.735014 | -0.758779 | 0.065595 | 0.532667 | -0.391858 | -0.497019 | 0.240822 | 0.848126 |
| 24 | 0.581377 | -0.804045 | 0.399887 | 1.535671 | 0.245878 | 0.904192 | -0.233991 | -0.925983 | 0.212280 | 0.499535 | -0.024926 | -0.925999 | 1.294925 |
| 25 | 0.161110 | 0.025075 | 0.716318 | 1.532230 | 0.889883 | 0.353167 | -0.058787 | -0.593046 | 0.093773 | 0.927085 | 0.199691 | -0.979872 | 0.232850 |
| 26 | 0.431443 | 0.442713 | 0.259120 | 0.045533 | 0.102675 | 0.367606 | 0.054320 | 0.942924 | 0.180609 | 0.550983 | 0.265291 | 0.321252 | -0.830969 |
| 27 | 0.344525 | -1.140315 | -0.725453 | -0.547965 | 0.449924 | 0.303904 | 1.053624 | 1.051712 | 0.509322 | 0.181611 | -0.519979 | -1.134490 | -1.439105 |
| 28 | -0.041565 | 0.671274 | 0.195143 | 0.247294 | 0.531620 | 1.050124 | 0.311358 | 0.988161 | -0.198869 | 0.387795 | 1.757366 | 1.351684 | 0.194840 |
| 29 | 0.417845 | -1.134173 | -0.760709 | -0.605264 | 0.077464 | 0.533333 | 1.104524 | 2.124971 | 0.083548 | 0.801730 | 0.092534 | -1.281628 | -1.468782 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 225 | 1.532114 | -1.060006 | -0.434145 | -0.999435 | -1.259462 | 0.039140 | -0.802013 | -0.655286 | 0.714448 | 1.005958 | -0.086372 | 0.537392 | 0.054440 |
| 226 | -0.942320 | 1.172080 | 0.506725 | -0.230675 | -0.104635 | 0.898742 | -1.107001 | -1.182148 | -0.940991 | 0.232366 | 1.778224 | 0.975251 | 1.731084 |
| 227 | 1.421974 | 0.631029 | -0.563813 | -0.694595 | -0.673270 | 0.929022 | 0.476907 | -1.025173 | -0.813644 | -0.060006 | -0.738730 | -0.558099 | 0.057654 |
| 228 | -1.473385 | -0.806223 | 1.849423 | -1.252541 | 0.941013 | -0.872947 | -1.812392 | -0.242718 | -0.097212 | -0.510500 | -0.232195 | -0.546399 | 0.945530 |
| 229 | -1.135926 | -0.772372 | 1.164844 | -1.022517 | 0.630202 | -0.496999 | -1.101656 | -0.168921 | -0.295159 | -0.587401 | 0.369033 | -0.266325 | 0.604469 |
| 230 | -1.085049 | 0.879566 | 0.442593 | 0.128917 | 0.393498 | 0.531555 | 0.392194 | 1.418515 | 0.891015 | -0.348926 | -0.756201 | -0.838584 | -0.015971 |
| 231 | -0.352258 | 0.556982 | 0.530520 | 0.443818 | 0.300921 | 0.032128 | -0.797384 | -0.573532 | 0.398084 | 0.328875 | -0.274964 | -1.300920 | 0.254456 |
| 232 | -1.190363 | 0.797356 | 0.758472 | 0.587917 | 0.890540 | 0.471925 | 0.105793 | 0.680721 | 0.230834 | -0.150709 | -0.816744 | -0.470618 | 0.371198 |
| 233 | -0.651003 | -0.586618 | 1.326854 | -0.451354 | 0.507113 | 0.165474 | -0.919675 | -0.448249 | -1.310940 | -1.372737 | 0.406029 | -1.414627 | -0.434858 |
| 234 | -1.459511 | -0.516281 | 1.631699 | -1.141842 | 0.584621 | -0.458541 | -1.428877 | -0.934556 | -0.216455 | -0.049794 | 0.095580 | 0.387068 | 0.693730 |
| 235 | -0.726984 | 0.702447 | 0.798069 | -0.320660 | 0.530902 | 1.019988 | 0.144995 | 0.207847 | 0.039592 | 0.220761 | 0.762941 | 0.575034 | 0.671517 |
| 236 | -0.300986 | -0.404923 | 0.715406 | 0.245380 | -0.427936 | -0.334843 | -0.228084 | -0.330898 | -0.674327 | 0.199560 | 0.827455 | 0.016433 | 0.866789 |
| 237 | -0.736244 | 0.088611 | 0.910051 | 0.437100 | 0.258256 | 0.363828 | -0.415290 | -0.717445 | -0.012727 | 0.436925 | -0.786954 | -1.217376 | 0.352825 |
| 238 | 0.610473 | -2.664315 | 1.303652 | -2.022376 | 1.500032 | -1.280926 | -1.249533 | 0.432111 | -0.768558 | 0.291156 | -0.092312 | 0.053770 | -0.401166 |
| 239 | -2.045424 | -2.954642 | 0.302601 | -0.868092 | -1.038134 | -1.230777 | 0.514329 | 0.057591 | -1.023895 | 0.275395 | -1.450282 | 0.386242 | 0.318763 |
| 240 | 0.329793 | -1.367570 | -1.454329 | -0.207924 | -0.723609 | -0.149025 | -0.085298 | -0.011595 | -0.240239 | -0.009120 | -0.325229 | -0.025722 | 0.114182 |
| 241 | -1.919591 | 1.382172 | -0.134161 | 0.837967 | -0.687780 | 0.944303 | -0.258652 | -0.742178 | 0.386031 | -1.178099 | -1.843543 | -0.710556 | -0.318561 |
| 242 | -2.087669 | 1.400006 | -0.494964 | 0.451717 | -0.759188 | 0.736625 | 0.133121 | -0.196031 | 1.121231 | 0.474128 | -0.345937 | -0.409324 | -0.442069 |
| 243 | -2.131652 | 0.439305 | -0.612226 | 0.854126 | -0.494550 | 0.825299 | 0.301373 | -0.018964 | 0.690556 | -0.078762 | -0.709495 | -0.075857 | -0.418656 |
| 244 | -1.611989 | -0.756403 | -0.410917 | 1.075909 | 0.297336 | -1.317576 | 1.115011 | -0.467065 | -0.768378 | 1.615499 | 1.611125 | -1.018782 | -1.798744 |
| 245 | -0.142010 | 0.000190 | -0.063461 | -0.506353 | -0.386942 | -0.256144 | 0.270621 | -1.497417 | 0.507892 | 0.456828 | -0.431169 | -0.978417 | 0.015849 |
| 246 | -1.263975 | -1.168117 | -1.396090 | -0.312016 | 1.862268 | 1.400290 | 0.646060 | -0.686864 | 0.418524 | -0.069926 | -0.653856 | -0.853617 | -0.106814 |
| 247 | -0.507700 | 0.899825 | 1.510153 | 1.083642 | 2.081451 | 0.589016 | 0.901321 | 0.658808 | 0.152596 | 0.176442 | -0.447633 | 0.287838 | 0.650479 |
| 248 | -0.159768 | 0.518093 | 2.197018 | 0.698491 | 0.476336 | -2.014255 | -1.614667 | -0.397282 | -1.781932 | -0.208894 | 1.650551 | -0.771436 | -0.987237 |
| 249 | -1.037899 | 1.016712 | 2.774230 | 0.665468 | -0.385673 | 0.587263 | -0.121609 | -0.331379 | 0.622484 | -0.387131 | -0.276584 | 0.218207 | 1.689216 |
| 250 | -0.526923 | -1.169944 | 0.474875 | -0.789231 | 0.369827 | -0.537003 | -1.089843 | -0.173366 | -0.023237 | -0.142334 | 0.740065 | 0.813114 | 0.872556 |
| 251 | -0.770856 | -1.024349 | -0.019140 | -0.097521 | 0.092703 | 0.369242 | -0.273901 | 0.190740 | -0.074032 | 0.113055 | 0.140291 | -0.696275 | 0.166679 |
| 252 | -0.905458 | -0.790575 | 0.206164 | -0.723816 | -0.444860 | 0.107833 | -0.734514 | -0.533865 | -0.634334 | 0.320526 | 0.088428 | -0.348210 | 0.347201 |
| 253 | -1.378235 | -0.338405 | 0.016815 | -0.394563 | 0.034043 | 1.023865 | -0.303960 | -1.316121 | 0.198697 | 0.670577 | 0.809574 | 0.580565 | 0.056004 |
| 254 | -0.199959 | -2.035812 | -0.904507 | -1.511975 | -0.437843 | 0.262972 | -1.943788 | -1.963300 | -2.256227 | 0.354369 | -0.039829 | 0.882325 | 0.139307 |
255 rows × 13 columns
# Elbow method: within-cluster sum of squares (inertia) for k = 1..14.
# KMeans.fit returns the fitted estimator, so the inertia can be read inline.
WSSs = [
    KMeans(n_clusters=k, random_state=0).fit(X).inertia_
    for k in range(1, 15)
]
WSSs
[3315.0, 2972.7888695817974, 2748.18187155972, 2544.9420084212106, 2413.687059384553, 2278.037996783226, 2213.3487507256823, 2123.4282707474663, 2067.8299633414163, 1977.777252698108, 1956.5229777214513, 1880.0296166971755, 1815.5096049846275, 1785.9955747862728]
# Draw the elbow curve: look for the bend to pick the number of clusters.
fig = plt.figure(figsize=(12, 12))
plt.plot(range(1, 15), WSSs)
[<matplotlib.lines.Line2D at 0x1e82ae84f98>]
# Number of clusters chosen from the elbow plot above.
K = 6
# Fix: K was defined but the literal 6 was hard-coded below, duplicating the
# magic number — pass K through so a single edit changes the cluster count.
kmeans_mfcc = KMeans(n_clusters=K, random_state=0, n_init=10)
kmeans_mfcc.fit(X)
KMeans(algorithm='auto', copy_x=True, init='k-means++', max_iter=300,
n_clusters=6, n_init=10, n_jobs=1, precompute_distances='auto',
random_state=0, tol=0.0001, verbose=0)
kmeans_mfcc.labels_
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
# Assign every row of X to its nearest centroid; for the data the model was
# fitted on, this reproduces kmeans_mfcc.labels_.
clusters_mfcc = kmeans_mfcc.predict(X)
clusters_mfcc
array([4, 2, 2, 2, 0, 1, 1, 0, 1, 4, 4, 4, 1, 4, 4, 3, 4, 4, 4, 4, 4, 0,
0, 4, 4, 4, 4, 3, 4, 3, 4, 4, 3, 1, 3, 1, 4, 4, 3, 1, 1, 1, 4, 4,
2, 1, 1, 1, 4, 1, 1, 1, 4, 3, 3, 4, 4, 1, 4, 1, 0, 4, 4, 4, 3, 3,
3, 4, 3, 0, 4, 2, 1, 4, 0, 4, 4, 3, 3, 0, 0, 2, 1, 4, 1, 0, 0, 3,
4, 1, 4, 4, 4, 4, 1, 1, 0, 3, 3, 3, 4, 1, 1, 4, 1, 4, 0, 3, 1, 1,
1, 0, 3, 2, 4, 4, 0, 1, 5, 3, 4, 0, 0, 2, 1, 0, 0, 0, 2, 2, 2, 2,
4, 4, 0, 0, 2, 2, 2, 4, 4, 4, 2, 2, 2, 3, 0, 4, 1, 3, 3, 3, 3, 3,
4, 3, 1, 0, 0, 0, 1, 1, 0, 2, 2, 0, 3, 0, 1, 0, 0, 0, 0, 1, 3, 2,
4, 4, 1, 2, 4, 3, 2, 3, 4, 4, 4, 1, 2, 3, 3, 0, 2, 2, 2, 1, 0, 1,
4, 2, 0, 4, 3, 3, 2, 3, 5, 5, 2, 3, 3, 2, 0, 3, 0, 2, 4, 2, 1, 2,
1, 3, 4, 4, 4, 1, 0, 1, 2, 2, 3, 0, 4, 2, 2, 4, 0, 2, 2, 2, 1, 3,
4, 4, 2, 2, 2, 4, 0, 0, 2, 2, 2, 2, 2])
# Attach the cluster assignment and the target label as new columns on X
# (plain column assignment is equivalent to .loc[:, col] = for new columns).
X['Cluster'] = clusters_mfcc
X['chosen'] = list(y)
X
| mfccfiles_1 | mfccfiles_2 | mfccfiles_3 | mfccfiles_4 | mfccfiles_5 | mfccfiles_6 | mfccfiles_7 | mfccfiles_8 | mfccfiles_9 | mfccfiles_10 | mfccfiles_11 | mfccfiles_12 | mfccfiles_13 | Cluster | chosen | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.339415 | 0.847773 | 0.497198 | -0.389310 | 1.225458 | 1.947033 | -0.736267 | 0.492219 | 0.576682 | 1.504697 | -1.796460 | 0.724954 | 0.958600 | 4 | 0 |
| 1 | 0.587658 | -1.195426 | 0.636375 | 0.199876 | 0.765321 | 0.061181 | 0.379367 | -0.440867 | 0.232893 | 1.339920 | 0.110001 | 0.807525 | 0.815678 | 2 | 0 |
| 2 | 1.465595 | -2.307943 | 0.354567 | -0.058273 | -1.298853 | -0.811453 | -1.551580 | -3.934320 | -1.079432 | 2.546130 | 1.421407 | 0.639359 | 0.199094 | 2 | 0 |
| 3 | 0.749403 | -1.690498 | -0.125200 | -1.016135 | 0.825845 | 0.271444 | -0.104786 | -0.992141 | 0.049182 | 1.425948 | -0.343269 | -0.789558 | -0.411898 | 2 | 0 |
| 4 | -0.280577 | 0.393332 | 0.744917 | 2.411400 | -0.777421 | -0.420018 | 1.258355 | -1.544565 | -0.498071 | 0.421527 | -0.632908 | -0.056846 | -0.072348 | 0 | 0 |
| 5 | -0.158690 | 0.404891 | -0.147920 | -0.299241 | -0.786974 | 0.697216 | 0.290501 | 0.019739 | -1.468086 | -0.346174 | -0.086965 | 0.026492 | 1.019512 | 1 | 0 |
| 6 | 1.646777 | 0.772744 | -1.425228 | -0.562610 | -1.556076 | 0.533289 | -0.404271 | 1.676958 | 0.979516 | 0.415548 | 0.544719 | 0.433332 | 0.204271 | 1 | 0 |
| 7 | 1.124970 | 0.506236 | 0.738993 | 1.984485 | -0.928706 | -0.494097 | -0.707105 | -0.494778 | -1.642929 | 0.207467 | 0.181382 | 2.431721 | 0.848697 | 0 | 0 |
| 8 | 0.920059 | 1.438862 | -2.048354 | 1.503567 | -2.801303 | 0.567132 | -0.745441 | 0.569519 | 0.130917 | 1.965436 | -0.034797 | 1.164878 | 0.074074 | 1 | 0 |
| 9 | 0.182544 | 0.310622 | 0.067722 | 0.870138 | 0.168366 | 0.682045 | -0.191296 | -0.144962 | -0.630020 | -0.284032 | -0.315301 | 0.344841 | 0.495167 | 4 | 0 |
| 10 | 0.168663 | 0.389450 | 0.034360 | 1.213392 | 0.248437 | 0.870618 | -0.460824 | -0.174734 | -0.710502 | -0.228408 | -0.265153 | 0.349416 | 0.584114 | 4 | 0 |
| 11 | 0.153010 | -0.118336 | 0.639531 | 1.504522 | 0.937909 | 0.356048 | -0.089987 | -0.628522 | 0.064203 | 0.966049 | 0.403915 | -0.943626 | 0.173874 | 4 | 0 |
| 12 | 0.132578 | 0.261966 | -2.871493 | -3.398160 | -0.256458 | 1.596532 | -0.358711 | 0.175955 | -0.499075 | 0.949085 | 2.235525 | -0.197712 | -0.272366 | 1 | 0 |
| 13 | 1.094629 | 0.885150 | -1.130672 | -0.083270 | 0.672482 | 0.750453 | -0.863949 | 0.140540 | 0.423312 | -0.305155 | -0.424905 | 0.318660 | 0.885900 | 4 | 0 |
| 14 | 0.771472 | 0.364448 | -0.454696 | 0.434253 | 0.912699 | 0.745924 | -0.073390 | -0.406473 | 0.450765 | 0.323180 | -0.458826 | -0.132295 | 0.495454 | 4 | 0 |
| 15 | 0.677561 | 0.166795 | 0.746471 | 0.075191 | 0.867924 | -1.621678 | 0.771146 | -0.067286 | 0.557998 | -0.093593 | 0.020233 | -0.800013 | -0.629188 | 3 | 0 |
| 16 | -0.032353 | 1.227345 | -0.188580 | 0.927210 | 0.016663 | 1.001867 | -0.473811 | 0.782387 | 1.542760 | -0.345478 | -0.838104 | -0.439443 | 1.179204 | 4 | 0 |
| 17 | 0.459031 | 1.258961 | -0.329412 | 1.391790 | -0.208888 | 1.059241 | -1.245671 | 0.619153 | 0.245780 | 0.644548 | -0.602629 | -0.928581 | 0.739885 | 4 | 0 |
| 18 | -0.359172 | 0.051214 | -0.603962 | 0.778896 | 1.630471 | 1.802477 | 1.486205 | -0.140738 | -0.894366 | 0.736624 | 2.114721 | 1.078175 | -0.965785 | 4 | 0 |
| 19 | 0.209859 | -0.615399 | -0.676895 | 0.735655 | 0.805509 | -0.696793 | 1.073068 | 0.240429 | -0.205934 | -0.759693 | 0.672843 | 0.569482 | -0.455391 | 4 | 0 |
| 20 | 0.127381 | -0.265099 | -0.258801 | -0.127568 | 0.649447 | 0.244473 | 1.897421 | -0.344616 | -0.593159 | 0.065147 | 1.787607 | 1.219355 | -0.171813 | 4 | 0 |
| 21 | 1.222717 | 0.409860 | 1.311826 | 0.703873 | 0.322062 | 0.305461 | -0.522644 | -0.750833 | 0.001767 | 0.017953 | 0.254329 | -0.227762 | -0.614790 | 0 | 0 |
| 22 | 1.173352 | 0.490500 | 0.742825 | -0.028159 | -0.272396 | -0.502733 | -0.759443 | -1.031924 | -0.157975 | 0.075659 | 0.604220 | 0.143298 | -0.001849 | 0 | 0 |
| 23 | 1.069960 | 0.858822 | -0.795544 | 0.076688 | 0.851875 | 0.735014 | -0.758779 | 0.065595 | 0.532667 | -0.391858 | -0.497019 | 0.240822 | 0.848126 | 4 | 0 |
| 24 | 0.581377 | -0.804045 | 0.399887 | 1.535671 | 0.245878 | 0.904192 | -0.233991 | -0.925983 | 0.212280 | 0.499535 | -0.024926 | -0.925999 | 1.294925 | 4 | 0 |
| 25 | 0.161110 | 0.025075 | 0.716318 | 1.532230 | 0.889883 | 0.353167 | -0.058787 | -0.593046 | 0.093773 | 0.927085 | 0.199691 | -0.979872 | 0.232850 | 4 | 0 |
| 26 | 0.431443 | 0.442713 | 0.259120 | 0.045533 | 0.102675 | 0.367606 | 0.054320 | 0.942924 | 0.180609 | 0.550983 | 0.265291 | 0.321252 | -0.830969 | 4 | 0 |
| 27 | 0.344525 | -1.140315 | -0.725453 | -0.547965 | 0.449924 | 0.303904 | 1.053624 | 1.051712 | 0.509322 | 0.181611 | -0.519979 | -1.134490 | -1.439105 | 3 | 0 |
| 28 | -0.041565 | 0.671274 | 0.195143 | 0.247294 | 0.531620 | 1.050124 | 0.311358 | 0.988161 | -0.198869 | 0.387795 | 1.757366 | 1.351684 | 0.194840 | 4 | 0 |
| 29 | 0.417845 | -1.134173 | -0.760709 | -0.605264 | 0.077464 | 0.533333 | 1.104524 | 2.124971 | 0.083548 | 0.801730 | 0.092534 | -1.281628 | -1.468782 | 3 | 0 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 225 | 1.532114 | -1.060006 | -0.434145 | -0.999435 | -1.259462 | 0.039140 | -0.802013 | -0.655286 | 0.714448 | 1.005958 | -0.086372 | 0.537392 | 0.054440 | 1 | 1 |
| 226 | -0.942320 | 1.172080 | 0.506725 | -0.230675 | -0.104635 | 0.898742 | -1.107001 | -1.182148 | -0.940991 | 0.232366 | 1.778224 | 0.975251 | 1.731084 | 0 | 1 |
| 227 | 1.421974 | 0.631029 | -0.563813 | -0.694595 | -0.673270 | 0.929022 | 0.476907 | -1.025173 | -0.813644 | -0.060006 | -0.738730 | -0.558099 | 0.057654 | 1 | 1 |
| 228 | -1.473385 | -0.806223 | 1.849423 | -1.252541 | 0.941013 | -0.872947 | -1.812392 | -0.242718 | -0.097212 | -0.510500 | -0.232195 | -0.546399 | 0.945530 | 2 | 1 |
| 229 | -1.135926 | -0.772372 | 1.164844 | -1.022517 | 0.630202 | -0.496999 | -1.101656 | -0.168921 | -0.295159 | -0.587401 | 0.369033 | -0.266325 | 0.604469 | 2 | 1 |
| 230 | -1.085049 | 0.879566 | 0.442593 | 0.128917 | 0.393498 | 0.531555 | 0.392194 | 1.418515 | 0.891015 | -0.348926 | -0.756201 | -0.838584 | -0.015971 | 3 | 1 |
| 231 | -0.352258 | 0.556982 | 0.530520 | 0.443818 | 0.300921 | 0.032128 | -0.797384 | -0.573532 | 0.398084 | 0.328875 | -0.274964 | -1.300920 | 0.254456 | 0 | 1 |
| 232 | -1.190363 | 0.797356 | 0.758472 | 0.587917 | 0.890540 | 0.471925 | 0.105793 | 0.680721 | 0.230834 | -0.150709 | -0.816744 | -0.470618 | 0.371198 | 4 | 1 |
| 233 | -0.651003 | -0.586618 | 1.326854 | -0.451354 | 0.507113 | 0.165474 | -0.919675 | -0.448249 | -1.310940 | -1.372737 | 0.406029 | -1.414627 | -0.434858 | 2 | 1 |
| 234 | -1.459511 | -0.516281 | 1.631699 | -1.141842 | 0.584621 | -0.458541 | -1.428877 | -0.934556 | -0.216455 | -0.049794 | 0.095580 | 0.387068 | 0.693730 | 2 | 1 |
| 235 | -0.726984 | 0.702447 | 0.798069 | -0.320660 | 0.530902 | 1.019988 | 0.144995 | 0.207847 | 0.039592 | 0.220761 | 0.762941 | 0.575034 | 0.671517 | 4 | 1 |
| 236 | -0.300986 | -0.404923 | 0.715406 | 0.245380 | -0.427936 | -0.334843 | -0.228084 | -0.330898 | -0.674327 | 0.199560 | 0.827455 | 0.016433 | 0.866789 | 0 | 1 |
| 237 | -0.736244 | 0.088611 | 0.910051 | 0.437100 | 0.258256 | 0.363828 | -0.415290 | -0.717445 | -0.012727 | 0.436925 | -0.786954 | -1.217376 | 0.352825 | 2 | 1 |
| 238 | 0.610473 | -2.664315 | 1.303652 | -2.022376 | 1.500032 | -1.280926 | -1.249533 | 0.432111 | -0.768558 | 0.291156 | -0.092312 | 0.053770 | -0.401166 | 2 | 1 |
| 239 | -2.045424 | -2.954642 | 0.302601 | -0.868092 | -1.038134 | -1.230777 | 0.514329 | 0.057591 | -1.023895 | 0.275395 | -1.450282 | 0.386242 | 0.318763 | 2 | 1 |
| 240 | 0.329793 | -1.367570 | -1.454329 | -0.207924 | -0.723609 | -0.149025 | -0.085298 | -0.011595 | -0.240239 | -0.009120 | -0.325229 | -0.025722 | 0.114182 | 1 | 1 |
| 241 | -1.919591 | 1.382172 | -0.134161 | 0.837967 | -0.687780 | 0.944303 | -0.258652 | -0.742178 | 0.386031 | -1.178099 | -1.843543 | -0.710556 | -0.318561 | 3 | 1 |
| 242 | -2.087669 | 1.400006 | -0.494964 | 0.451717 | -0.759188 | 0.736625 | 0.133121 | -0.196031 | 1.121231 | 0.474128 | -0.345937 | -0.409324 | -0.442069 | 4 | 1 |
| 243 | -2.131652 | 0.439305 | -0.612226 | 0.854126 | -0.494550 | 0.825299 | 0.301373 | -0.018964 | 0.690556 | -0.078762 | -0.709495 | -0.075857 | -0.418656 | 4 | 1 |
| 244 | -1.611989 | -0.756403 | -0.410917 | 1.075909 | 0.297336 | -1.317576 | 1.115011 | -0.467065 | -0.768378 | 1.615499 | 1.611125 | -1.018782 | -1.798744 | 2 | 1 |
| 245 | -0.142010 | 0.000190 | -0.063461 | -0.506353 | -0.386942 | -0.256144 | 0.270621 | -1.497417 | 0.507892 | 0.456828 | -0.431169 | -0.978417 | 0.015849 | 2 | 1 |
| 246 | -1.263975 | -1.168117 | -1.396090 | -0.312016 | 1.862268 | 1.400290 | 0.646060 | -0.686864 | 0.418524 | -0.069926 | -0.653856 | -0.853617 | -0.106814 | 2 | 1 |
| 247 | -0.507700 | 0.899825 | 1.510153 | 1.083642 | 2.081451 | 0.589016 | 0.901321 | 0.658808 | 0.152596 | 0.176442 | -0.447633 | 0.287838 | 0.650479 | 4 | 1 |
| 248 | -0.159768 | 0.518093 | 2.197018 | 0.698491 | 0.476336 | -2.014255 | -1.614667 | -0.397282 | -1.781932 | -0.208894 | 1.650551 | -0.771436 | -0.987237 | 0 | 1 |
| 249 | -1.037899 | 1.016712 | 2.774230 | 0.665468 | -0.385673 | 0.587263 | -0.121609 | -0.331379 | 0.622484 | -0.387131 | -0.276584 | 0.218207 | 1.689216 | 0 | 1 |
| 250 | -0.526923 | -1.169944 | 0.474875 | -0.789231 | 0.369827 | -0.537003 | -1.089843 | -0.173366 | -0.023237 | -0.142334 | 0.740065 | 0.813114 | 0.872556 | 2 | 1 |
| 251 | -0.770856 | -1.024349 | -0.019140 | -0.097521 | 0.092703 | 0.369242 | -0.273901 | 0.190740 | -0.074032 | 0.113055 | 0.140291 | -0.696275 | 0.166679 | 2 | 1 |
| 252 | -0.905458 | -0.790575 | 0.206164 | -0.723816 | -0.444860 | 0.107833 | -0.734514 | -0.533865 | -0.634334 | 0.320526 | 0.088428 | -0.348210 | 0.347201 | 2 | 1 |
| 253 | -1.378235 | -0.338405 | 0.016815 | -0.394563 | 0.034043 | 1.023865 | -0.303960 | -1.316121 | 0.198697 | 0.670577 | 0.809574 | 0.580565 | 0.056004 | 2 | 1 |
| 254 | -0.199959 | -2.035812 | -0.904507 | -1.511975 | -0.437843 | 0.262972 | -1.943788 | -1.963300 | -2.256227 | 0.354369 | -0.039829 | 0.882325 | 0.139307 | 2 | 1 |
255 rows × 15 columns
# Count rows per (chosen, Cluster) pair and reshape so clusters index the rows
# and the two target values become columns — unstack('chosen') yields the same
# table as reset_index + pivot (NaN where a combination is absent).
counts = X.groupby(['chosen', 'Cluster']).size()
pivot_df = counts.unstack('chosen')
pivot_df.loc[:, [0, 1]].plot.bar(stacked=True, figsize=(10, 7))
<matplotlib.axes._subplots.AxesSubplot at 0x1e82aec1940>